Skip to content

Commit

Permalink
Merge remote-tracking branch 'IQSS/develop' into DANS_Performance2
Browse files Browse the repository at this point in the history
  • Loading branch information
qqmyers committed Oct 8, 2024
2 parents 94f9c72 + d50c484 commit ff6dbe5
Show file tree
Hide file tree
Showing 12 changed files with 153 additions and 36 deletions.
1 change: 1 addition & 0 deletions doc/release-notes/10901deaccessioned file edit fix.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
When a dataset was deaccessioned and it was the only previous version, an error would occur when trying to update the files. This is now fixed.
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
## Fix facets filter labels not translated in result block

On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they aren't translated in the search results and remain in the default language, English.

This version of Dataverse fixes this, and includes internationalization of the facets visible in the search results section.

For more information, see issue [#9408](https://github.com/IQSS/dataverse/issues/9408) and pull request [#10158](https://github.com/IQSS/dataverse/pull/10158)
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
The following API has been added:

/api/datasets/{datasetId}/links

It lists the Dataverse collections linked to a dataset. It can be executed only by administrators.
12 changes: 9 additions & 3 deletions doc/sphinx-guides/source/admin/dataverses-datasets.rst
Original file line number Diff line number Diff line change
Expand Up @@ -129,15 +129,21 @@ Lists the link(s) created between a dataset and a Dataverse collection (see the

curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/datasets/$linked-dataset-id/links

It returns a list in the following format:
It returns a list in the following format (new format as of v6.4):

.. code-block:: json
{
"status": "OK",
"data": {
"dataverses that link to dataset id 56782": [
"crc990 (id 18802)"
"id": 5,
"identifier": "FK2/OTCWMM",
"linked-dataverses": [
{
"id": 2,
"alias": "dataverse1",
"displayName": "Lab experiments 2023 June"
}
]
}
}
Expand Down
2 changes: 1 addition & 1 deletion doc/sphinx-guides/source/api/changelog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ v6.4

- **/api/datasets/$dataset-id/modifyRegistration**: Changed from GET to POST
- **/api/datasets/modifyRegistrationPIDMetadataAll**: Changed from GET to POST

- **/api/datasets/{identifier}/links**: The GET endpoint returns a list of Dataverses linked to the given Dataset. The format of the response has changed for v6.4, making it backward incompatible.

v6.3
----
Expand Down
9 changes: 7 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/Dataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -333,15 +333,20 @@ public DatasetVersion getLatestVersion() {
return getVersions().get(0);
}

public DatasetVersion getLatestVersionForCopy() {
public DatasetVersion getLatestVersionForCopy(boolean includeDeaccessioned) {
for (DatasetVersion testDsv : getVersions()) {
if (testDsv.isReleased() || testDsv.isArchived()) {
if (testDsv.isReleased() || testDsv.isArchived()
|| (testDsv.isDeaccessioned() && includeDeaccessioned)) {
return testDsv;
}
}
return getVersions().get(0);
}

public DatasetVersion getLatestVersionForCopy(){
return getLatestVersionForCopy(false);
}

public List<DatasetVersion> getVersions() {
return versions;
}
Expand Down
10 changes: 8 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -2077,10 +2077,16 @@ public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id")
List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
datasetBuilder.add("id", dataverse.getId());
datasetBuilder.add("alias", dataverse.getAlias());
datasetBuilder.add("displayName", dataverse.getDisplayName());
dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
}
JsonObjectBuilder response = Json.createObjectBuilder();
response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
response.add("id", datasetId);
response.add("identifier", dataset.getIdentifier());
response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder);
return ok(response);
} catch (WrappedResponse wr) {
return wr.getResponse();
Expand Down
4 changes: 2 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/api/Files.java
Original file line number Diff line number Diff line change
Expand Up @@ -343,10 +343,10 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP
DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId);
FileMetadata fileToDelete = dataFile.getLatestFileMetadata();
Dataset dataset = dataFile.getOwner();
DatasetVersion v = dataset.getOrCreateEditVersion();
dataset.getOrCreateEditVersion();
deletePhysicalFile = !dataFile.isReleased();

UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete), v);
UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete));
update_cmd.setValidateLenient(true);

try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
DatasetVersion latestVersion = theDataset.getLatestVersion();
if(persistedVersion==null) {
Long id = latestVersion.getId();
persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId());
persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy(true).getId());
}
//Get or create (currently only when called with fmVarMet != null) a new edit version
DatasetVersion editVersion = theDataset.getOrCreateEditVersion(fmVarMet);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Logger;
Expand Down Expand Up @@ -1231,40 +1232,33 @@ public String getTypeFromFilterQuery(String filterQuery) {
}

public List<String> getFriendlyNamesFromFilterQuery(String filterQuery) {


if ((filterQuery == null)||
(datasetfieldFriendlyNamesBySolrField == null)||
(staticSolrFieldFriendlyNamesBySolrField==null)){

if ((filterQuery == null) ||
(datasetfieldFriendlyNamesBySolrField == null) ||
(staticSolrFieldFriendlyNamesBySolrField == null)) {
return null;
}
if(!filterQuery.contains(":")) {

if (!filterQuery.contains(":")) {
return null;
}

int index = filterQuery.indexOf(":");
String key = filterQuery.substring(0,index);
String value = filterQuery.substring(index+1);

List<String> friendlyNames = new ArrayList<>();
// friendlyNames gets 2 entries: key and value
List<String> friendlyNames = new ArrayList<>(2);

// Get dataset field friendly name from default resource bundle file
String datasetfieldFriendyName = datasetfieldFriendlyNamesBySolrField.get(key);
if (datasetfieldFriendyName != null) {
friendlyNames.add(datasetfieldFriendyName);
} else {
// Get non-dataset field friendly name from "staticSearchFields" resource bundle file
String nonDatasetSolrField = staticSolrFieldFriendlyNamesBySolrField.get(key);
if (nonDatasetSolrField != null) {
friendlyNames.add(nonDatasetSolrField);
} else if (key.equals(SearchFields.PUBLICATION_STATUS)) {
/**
* @todo Refactor this quick fix for
* https://github.com/IQSS/dataverse/issues/618 . We really need
* to get rid of all the reflection that's happening with
* solrQueryResponse.getStaticSolrFieldFriendlyNamesBySolrField()
* and
*/
friendlyNames.add("Publication Status");
} else {
// meh. better than nuthin'
friendlyNames.add(key);
Expand All @@ -1276,9 +1270,13 @@ public List<String> getFriendlyNamesFromFilterQuery(String filterQuery) {
String valueWithoutQuotes = noTrailingQuote;

if (key.equals(SearchFields.METADATA_TYPES) && getDataverse() != null && getDataverse().getMetadataBlockFacets() != null) {
Optional<String> friendlyName = getDataverse().getMetadataBlockFacets().stream().filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)).findFirst().map(block -> block.getMetadataBlock().getLocaleDisplayFacet());
Optional<String> friendlyName = getDataverse().getMetadataBlockFacets()
.stream()
.filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes))
.findFirst()
.map(block -> block.getMetadataBlock().getLocaleDisplayFacet());
logger.fine(String.format("action=getFriendlyNamesFromFilterQuery key=%s value=%s friendlyName=%s", key, value, friendlyName));
if(friendlyName.isPresent()) {
if (friendlyName.isPresent()) {
friendlyNames.add(friendlyName.get());
return friendlyNames;
}
Expand All @@ -1290,7 +1288,15 @@ public List<String> getFriendlyNamesFromFilterQuery(String filterQuery) {
}
}

friendlyNames.add(valueWithoutQuotes);
// Get value friendly name from default resource bundle file
String valueFriendlyName;
try {
valueFriendlyName = BundleUtil.getStringFromPropertyFile(noTrailingQuote, "Bundle");
} catch (MissingResourceException e) {
valueFriendlyName = noTrailingQuote;
}

friendlyNames.add(valueFriendlyName);
return friendlyNames;
}

Expand Down
82 changes: 82 additions & 0 deletions src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -667,6 +667,60 @@ public void testCreatePublishDestroyDataset() {
deleteDatasetResponse.prettyPrint();
assertEquals(200, deleteDatasetResponse.getStatusCode());

// Start of test of deleting a file from a deaccessioned version.

// Create Dataset for deaccession test.
Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
deaccessionTestDataset.prettyPrint();
deaccessionTestDataset.then().assertThat().statusCode(CREATED.getStatusCode());
Integer deaccessionTestDatasetId = UtilIT.getDatasetIdFromResponse(deaccessionTestDataset);

// File upload for deaccession test.
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
Response uploadResponse = UtilIT.uploadFileViaNative(deaccessionTestDatasetId.toString(), pathToFile, apiToken);
uploadResponse.prettyPrint();
uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
Integer deaccessionTestFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");

// Publish Dataset for deaccession test.
Response deaccessionTestPublishResponse = UtilIT.publishDatasetViaNativeApi(deaccessionTestDatasetId, "major", apiToken);
deaccessionTestPublishResponse.prettyPrint();

// Deaccession Dataset for deaccession test.
Response deaccessionTestDatasetResponse = UtilIT.deaccessionDataset(deaccessionTestDatasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
deaccessionTestDatasetResponse.prettyPrint();
deaccessionTestDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());

// Version check for deaccession test - Deaccessioned.
Response deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken);
deaccessionTestVersions.prettyPrint();
deaccessionTestVersions.then().assertThat()
.body("data[0].latestVersionPublishingState", equalTo("DEACCESSIONED"))
.statusCode(OK.getStatusCode());

// File deletion / Draft creation due diligence check for deaccession test.
Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken);
deaccessionTestDeleteFile.prettyPrint();
deaccessionTestDeleteFile
.then().assertThat()
.statusCode(OK.getStatusCode());

// Version check for deaccession test - Draft.
deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken);
deaccessionTestVersions.prettyPrint();
deaccessionTestVersions.then().assertThat()
.body("data[0].latestVersionPublishingState", equalTo("DRAFT"))
.statusCode(OK.getStatusCode());

// Deleting Dataset for deaccession test.
Response deaccessionTestDelete = UtilIT.destroyDataset(deaccessionTestDatasetId, apiToken);
deaccessionTestDelete.prettyPrint();
deaccessionTestDelete.then()
.assertThat()
.statusCode(OK.getStatusCode());

// End of deaccession test.

Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
deleteDataverseResponse.prettyPrint();
assertEquals(200, deleteDataverseResponse.getStatusCode());
Expand Down Expand Up @@ -2963,6 +3017,34 @@ public void testLinkingDatasets() {
linkDataset.then().assertThat()
.statusCode(OK.getStatusCode());

// Link another to test the list of linked datasets
Response createDataverse3 = UtilIT.createRandomDataverse(apiToken);
createDataverse3.prettyPrint();
createDataverse3.then().assertThat()
.statusCode(CREATED.getStatusCode());
String dataverse3Alias = UtilIT.getAliasFromResponse(createDataverse3);
Integer dataverse3Id = UtilIT.getDatasetIdFromResponse(createDataverse3);
linkDataset = UtilIT.linkDataset(datasetPid, dataverse3Alias, superuserApiToken);
linkDataset.prettyPrint();
linkDataset.then().assertThat()
.statusCode(OK.getStatusCode());
// get the list in Json format
Response linkDatasetsResponse = UtilIT.getDatasetLinks(datasetPid, superuserApiToken);
linkDatasetsResponse.prettyPrint();
linkDatasetsResponse.then().assertThat()
.statusCode(OK.getStatusCode());
JsonObject linkDatasets = Json.createReader(new StringReader(linkDatasetsResponse.asString())).readObject();
JsonArray lst = linkDatasets.getJsonObject("data").getJsonArray("linked-dataverses");
List<Integer> ids = List.of(dataverse2Id, dataverse3Id);
List<Integer> uniqueids = new ArrayList<>();
assertEquals(ids.size(), lst.size());
for (int i = 0; i < lst.size(); i++) {
int id = lst.getJsonObject(i).getInt("id");
assertTrue(ids.contains(id));
assertFalse(uniqueids.contains(id));
uniqueids.add(id);
}

//Experimental code for trying to trick test into thinking the dataset has been harvested
/*
createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverse1Alias, apiToken);
Expand Down
7 changes: 3 additions & 4 deletions src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -278,8 +278,8 @@ public void testMoveLinkedDataset() {
.body("message", equalTo("Use the query parameter forceMove=true to complete the move. This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. "));

JsonObject linksBeforeData = Json.createReader(new StringReader(getLinksBefore.asString())).readObject();
assertEquals("OK", linksBeforeData.getString("status"));
assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0));
assertEquals(datasetId, linksBeforeData.getJsonObject("data").getInt("id"));
assertEquals(dataverse2Id, linksBeforeData.getJsonObject("data").getJsonArray("linked-dataverses").get(0).asJsonObject().getInt("id"));

boolean forceMove = true;
Response forceMoveLinkedDataset = UtilIT.moveDataset(datasetId.toString(), dataverse2Alias, forceMove, superuserApiToken);
Expand Down Expand Up @@ -308,8 +308,7 @@ public void testMoveLinkedDataset() {

JsonObject linksAfterData = Json.createReader(new StringReader(getLinksAfter.asString())).readObject();
assertEquals("OK", linksAfterData.getString("status"));
assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size());

assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("linked-dataverses").size());
}

@Test
Expand Down

0 comments on commit ff6dbe5

Please sign in to comment.