From ae56c4adea3d612281858bc8026560482723a349 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 28 Jul 2021 17:02:24 +0200 Subject: [PATCH 1/9] chore(payara): update everything to Payara 5.2021.5 #8030 --- conf/docker-aio/0prep_deps.sh | 4 ++-- conf/docker-aio/c8.dockerfile | 2 +- doc/release-notes/7700-upgrade-payara.md | 6 +++--- doc/sphinx-guides/source/developers/dev-environment.rst | 4 ++-- doc/sphinx-guides/source/installation/prerequisites.rst | 6 +++--- downloads/download.sh | 2 +- pom.xml | 2 +- scripts/vagrant/setup.sh | 2 +- .../edu/harvard/iq/dataverse/util/DataSourceProducer.java | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh index c26492b2d25..f1832fd2ec3 100755 --- a/conf/docker-aio/0prep_deps.sh +++ b/conf/docker-aio/0prep_deps.sh @@ -4,10 +4,10 @@ if [ ! -d dv/deps ]; then fi wdir=`pwd` -if [ ! -e dv/deps/payara-5.2021.4.zip ]; then +if [ ! -e dv/deps/payara-5.2021.5.zip ]; then echo "payara dependency prep" # no more fiddly patching :) - wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.4/payara-5.2021.4.zip -O dv/deps/payara-5.2021.4.zip + wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.5/payara-5.2021.5.zip -O dv/deps/payara-5.2021.5.zip fi if [ ! -e dv/deps/solr-8.8.1dv.tgz ]; then diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile index 82d3fdac023..515f69b8c55 100644 --- a/conf/docker-aio/c8.dockerfile +++ b/conf/docker-aio/c8.dockerfile @@ -24,7 +24,7 @@ COPY disableipv6.conf /etc/sysctl.d/ RUN rm /etc/httpd/conf/* COPY httpd.conf /etc/httpd/conf RUN cd /opt ; tar zxf /tmp/dv/deps/solr-8.8.1dv.tgz -RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2021.4.zip ; ln -s /opt/payara5 /opt/glassfish4 +RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2021.5.zip ; ln -s /opt/payara5 /opt/glassfish4 # this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm` # this dies under Java 11, do we keep it? diff --git a/doc/release-notes/7700-upgrade-payara.md b/doc/release-notes/7700-upgrade-payara.md index de7c58bf963..40e9552bfe3 100644 --- a/doc/release-notes/7700-upgrade-payara.md +++ b/doc/release-notes/7700-upgrade-payara.md @@ -1,9 +1,9 @@ -### Payara 5.2021.4 (or Higher) Required +### Payara 5.2021.5 (or Higher) Required -Some changes in this release require an upgrade to Payara 5.2021.4 or higher. +Some changes in this release require an upgrade to Payara 5.2021.5 or higher. Instructions on how to update can be found in the -[Payara documentation](https://docs.payara.fish/community/docs/5.2021.4/documentation/user-guides/upgrade-payara.html) +[Payara documentation](https://docs.payara.fish/community/docs/5.2021.5/documentation/user-guides/upgrade-payara.html) It would likely be safer to upgrade Payara first, while still running Dataverse 5.6, and then proceed with the steps below. 
Upgrading from an earlier version of Payara should be a straightforward process: diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index 264324fb604..61ab98bf292 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -85,9 +85,9 @@ To install Payara, run the following commands: ``cd /usr/local`` -``sudo curl -O -L https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.4/payara-5.2021.4.zip`` +``sudo curl -O -L https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.5/payara-5.2021.5.zip`` -``sudo unzip payara-5.2021.4.zip`` +``sudo unzip payara-5.2021.5.zip`` ``sudo chown -R $USER /usr/local/payara5`` diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 0265e390d14..e3dc04ac70b 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -44,7 +44,7 @@ On RHEL/derivative you can make Java 11 the default with the ``alternatives`` co Payara ------ -Payara 5.2021.4 is recommended. Newer versions might work fine, regular updates are recommended. +Payara 5.2021.5 is recommended. Newer versions might work fine, regular updates are recommended. Installing Payara ================= @@ -55,8 +55,8 @@ Installing Payara - Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below):: - # wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.4/payara-5.2021.4.zip - # unzip payara-5.2021.4.zip + # wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.5/payara-5.2021.5.zip + # unzip payara-5.2021.5.zip # mv payara5 /usr/local If you intend to install and run Payara under a service account (and we hope you do), chown -R the Payara hierarchy to root to protect it but give the service account access to the below directories: diff --git a/downloads/download.sh b/downloads/download.sh index c9cd2942295..33476c24b76 100755 --- a/downloads/download.sh +++ b/downloads/download.sh @@ -1,5 +1,5 @@ #!/bin/sh -curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.4/payara-5.2021.4.zip +curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2021.5/payara-5.2021.5.zip curl -L -O https://archive.apache.org/dist/lucene/solr/8.8.1/solr-8.8.1.tgz curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar diff --git a/pom.xml b/pom.xml index 92b0ff3d951..f76a2708752 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,7 @@ false 8.0.0 - 5.2021.4 + 5.2021.5 42.2.19 1.11.762 1.2 diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh index 292c5b56f63..24bac307709 100644 --- a/scripts/vagrant/setup.sh +++ b/scripts/vagrant/setup.sh @@ -52,7 +52,7 @@ SOLR_USER=solr echo "Ensuring Unix user '$SOLR_USER' exists" useradd $SOLR_USER || : DOWNLOAD_DIR='/dataverse/downloads' -PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2021.4.zip" +PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2021.5.zip" SOLR_TGZ="$DOWNLOAD_DIR/solr-8.8.1.tgz" if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...." 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java index b393ca4a605..e9ac5bfd230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java @@ -38,7 +38,7 @@ //}) // // ... but at this time we don't think we need any. The full list -// of properties can be found at https://docs.payara.fish/community/docs/5.2021.4/documentation/payara-server/jdbc/advanced-connection-pool-properties.html#full-list-of-properties +// of properties can be found at https://docs.payara.fish/community/docs/5.2021.5/documentation/payara-server/jdbc/advanced-connection-pool-properties.html#full-list-of-properties // // All these properties cannot be configured via MPCONFIG as Payara doesn't support this (yet). To be enhanced. // See also https://github.com/payara/Payara/issues/5024 From 259e26d748e50f62a32f7b232978954e9b219fd4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 28 Jul 2021 12:07:39 -0400 Subject: [PATCH 2/9] add flag required to also update PID at provider during migrate --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 97f082a8db2..f382320d8a2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1206,7 +1206,7 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S @POST @Path("{id}/actions/:releasemigrated") @Consumes("application/ld+json, application/json-ld") - public Response publishMigratedDataset(String jsonldBody, @PathParam("id") String id) { + public Response publishMigratedDataset(String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) { try { AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { @@ -1270,11 +1270,11 @@ public Response publishMigratedDataset(String jsonldBody, @PathParam("id") Strin if (prePubWf.isPresent()) { // Start the workflow, the workflow will call FinalizeDatasetPublication later wfService.start(prePubWf.get(), - new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, false), + new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider), false); } else { FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds, - createDataverseRequest(user), false); + createDataverseRequest(user), !contactPIDProvider); ds = commandEngine.submit(cmd); } } catch (CommandException ex) { From 0bfda2ef7c0a7927320a38f70669b50985c908ae Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 28 Jul 2021 12:44:44 -0400 Subject: [PATCH 3/9] update docs --- doc/sphinx-guides/source/admin/dataverses-datasets.rst | 4 +++- .../source/developers/dataset-migration-api.rst | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index a55c90d2eb3..a18204588c2 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -41,7 +41,7 @@ Recursively assigns the users and groups having a 
role(s),that are in the set co curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/addRoleAssignmentsToChildren Configure a Dataverse Collection to store all new files in a specific file store -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To direct new files (uploaded when datasets are created or edited) for all datasets in a given Dataverse collection, the store can be specified via the API as shown below, or by editing the 'General Information' for a Dataverse collection on the Dataverse collection page. Only accessible to superusers. :: @@ -110,6 +110,8 @@ Mints a new identifier for a dataset previously registered with a handle. Only a curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://$SERVER/api/admin/$dataset-id/reregisterHDLToPID +.. _send-metadata-to-pid-provider: + Send Dataset metadata to PID provider ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/developers/dataset-migration-api.rst b/doc/sphinx-guides/source/developers/dataset-migration-api.rst index d46db5e3c9e..b01cdadae79 100644 --- a/doc/sphinx-guides/source/developers/dataset-migration-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-migration-api.rst @@ -7,6 +7,7 @@ This experimental migration API offers an additional option with some potential * metadata can be specified using the json-ld format used in the OAI-ORE metadata export * existing publication dates and PIDs are maintained (currently limited to the case where the PID can be managed by the Dataverse software, e.g. where the authority and shoulder match those the software is configured for) +* updating the PID at the provider can be done immediately or later (with other existing APIs) * adding files can be done via the standard APIs, including using direct-upload to S3 This API consists of 2 calls: one to create an initial Dataset version, and one to 'republish' the dataset through Dataverse with a specified publication date. @@ -47,3 +48,9 @@ The call above creates a Dataset. Once it is created, other APIs can be used to curl -H 'Content-Type: application/jsonld' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated" datePublished is the only metadata supported in this call. + +An optional query parameter: updatepidatprovider (default is false) can be set to true to automatically update the metadata and targetUrl of the PID at the provider. With this set true, the result of this call will be that the PID redirects to this dataset rather than the dataset in the source repository. + + curl -H 'Content-Type: application/jsonld' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated?updatepidatprovider=true" + + If the parameter is not added and set to true, other existing APIs can be used to update the PID at the provider later, e.g. 
:ref:`send-metadata-to-pid-provider` \ No newline at end of file From e38305d1896065f599b5adbb6dfa561fa74ca11f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 29 Jul 2021 11:41:24 -0400 Subject: [PATCH 4/9] #7790 delete physical files on ds version delete --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 4f73e64d5a4..be960082bd6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2777,14 +2777,22 @@ public String editFileMetadata(){ public String deleteDatasetVersion() { DeleteDatasetVersionCommand cmd; + + Map deleteStorageLocations = datafileService.getPhysicalFilesToDelete(dataset.getLatestVersion()); + boolean deleteCommandSuccess = false; try { cmd = new DeleteDatasetVersionCommand(dvRequestService.getDataverseRequest(), dataset); commandEngine.submit(cmd); JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("datasetVersion.message.deleteSuccess")); + deleteCommandSuccess = true; } catch (CommandException ex) { JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.message.deleteFailure")); logger.severe(ex.getMessage()); } + + if (deleteCommandSuccess && !deleteStorageLocations.isEmpty()) { + datafileService.finalizeFileDeletes(deleteStorageLocations); + } return returnToDatasetOnly(); } From ab5eb62435c35c1e4b4aab63d190f754ed1d7383 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Sun, 1 Aug 2021 15:10:30 -0400 Subject: [PATCH 5/9] fix for mistmatched tags and skipped tabular files --- .../edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 693e3cd6b10..9248069c8c1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1425,7 +1425,8 @@ public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVe * These days we return early to avoid this exposure. */ if (dataFile.isRestricted()) { - return; + //Skip this file but don't exit the loop so that tabular info from non-restricted files still get written + continue; } if (dataFile != null && dataFile.isTabularData()) { From b50def3737e829438dad3dbcc4d55ac6a8fbd6ef Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 2 Aug 2021 17:20:10 -0400 Subject: [PATCH 6/9] format fixes --- doc/sphinx-guides/source/developers/dataset-migration-api.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/dataset-migration-api.rst b/doc/sphinx-guides/source/developers/dataset-migration-api.rst index b01cdadae79..df31f09fc4b 100644 --- a/doc/sphinx-guides/source/developers/dataset-migration-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-migration-api.rst @@ -51,6 +51,8 @@ datePublished is the only metadata supported in this call. An optional query parameter: updatepidatprovider (default is false) can be set to true to automatically update the metadata and targetUrl of the PID at the provider. With this set true, the result of this call will be that the PID redirects to this dataset rather than the dataset in the source repository. +.. 
code-block:: bash + curl -H 'Content-Type: application/jsonld' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated?updatepidatprovider=true" - If the parameter is not added and set to true, other existing APIs can be used to update the PID at the provider later, e.g. :ref:`send-metadata-to-pid-provider` \ No newline at end of file +If the parameter is not added and set to true, other existing APIs can be used to update the PID at the provider later, e.g. :ref:`send-metadata-to-pid-provider` \ No newline at end of file From de648cef5d71a6e724cdc8fcf5d1ec65b30ec076 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 3 Aug 2021 10:18:47 -0400 Subject: [PATCH 7/9] remove fine log stmt that has a NPE --- .../java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 90847ad127a..1e868fa0fc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -104,12 +104,6 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, LocalDateTime dateTime = getDateTimeFrom(dateString); ds.setModificationTime(Timestamp.valueOf(dateTime)); } - try { - logger.fine("Output dsv: " + new OREMap(dsv, false).getOREMap().toString()); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } } return ds; } From 3baaa5b436470113146ad9329d8cf2a9272345ea Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 3 Aug 2021 10:40:24 -0400 Subject: [PATCH 8/9] fix content-type in docs --- doc/sphinx-guides/source/developers/dataset-migration-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/dataset-migration-api.rst b/doc/sphinx-guides/source/developers/dataset-migration-api.rst index df31f09fc4b..1dc8f7866e0 100644 --- a/doc/sphinx-guides/source/developers/dataset-migration-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-migration-api.rst @@ -45,7 +45,7 @@ The call above creates a Dataset. Once it is created, other APIs can be used to export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - curl -H 'Content-Type: application/jsonld' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated" + curl -H 'Content-Type: application/ld+json' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated" datePublished is the only metadata supported in this call. @@ -53,6 +53,6 @@ An optional query parameter: updatepidatprovider (default is false) can be set t .. 
code-block:: bash - curl -H 'Content-Type: application/jsonld' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated?updatepidatprovider=true" + curl -H 'Content-Type: application/ld+json' -H X-Dataverse-key:$API_TOKEN -X POST -d '{"schema:datePublished": "2020-10-26","@context":{ "schema":"http://schema.org/"}}' "$SERVER_URL/api/datasets/{id}/actions/:releasemigrated?updatepidatprovider=true" If the parameter is not added and set to true, other existing APIs can be used to update the PID at the provider later, e.g. :ref:`send-metadata-to-pid-provider` \ No newline at end of file From eca842f9bee9e9e49ccf5f052494a4567f134bad Mon Sep 17 00:00:00 2001 From: Gustavo Durand Date: Tue, 3 Aug 2021 11:22:25 -0400 Subject: [PATCH 9/9] Update DdiExportUtil.java --- .../edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 9248069c8c1..9061c890f01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1422,10 +1422,9 @@ public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVe * included for restricted files but that meant that summary * statistics were exposed. (To get at these statistics, API users * should instead use the "Data Variable Metadata Access" endpoint.) - * These days we return early to avoid this exposure. + * These days we skip restricted files to avoid this exposure. */ if (dataFile.isRestricted()) { - //Skip this file but don't exit the loop so that tabular info from non-restricted files still get written continue; }
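For reference, the control flow that patches 5 and 9 settle on in createDataDscr amounts to the sketch below. The loop structure and variable names are assumed from the hunks above rather than quoted in full, and the XML-writing details are elided; only isRestricted() and isTabularData() appear verbatim in the patch context. The point of the change: skipping a restricted file with continue still lets variable-level metadata be written for later, non-restricted tabular files, whereas the old return ended the dataDscr section at the first restricted file.

    // Sketch only (simplified, not the verbatim method body):
    // iterate the version's files, skip restricted ones, keep exporting the rest.
    for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
        DataFile dataFile = fileMetadata.getDataFile();
        if (dataFile.isRestricted()) {
            // The pre-patch 'return' dropped this file AND every file after it;
            // 'continue' only omits this restricted file and its summary statistics.
            continue;
        }
        if (dataFile != null && dataFile.isTabularData()) {
            // ... write the <var> elements / summary statistics for this tabular file ...
        }
    }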