From b9bed069c01a5587cc1a75c551ff4352dd9fc755 Mon Sep 17 00:00:00 2001 From: chenganj Date: Fri, 19 Jul 2019 15:30:05 -0400 Subject: [PATCH 01/94] Internationalization - Application Terms of Use --- .../database/upgrades/upgrade_v4.15.1_to.sql | 10 ++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 14 ++++++++ .../iq/dataverse/settings/Setting.java | 26 +++++++++++++- .../settings/SettingsServiceBean.java | 34 +++++++++++++++++++ .../iq/dataverse/util/SystemConfig.java | 3 +- 5 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 scripts/database/upgrades/upgrade_v4.15.1_to.sql diff --git a/scripts/database/upgrades/upgrade_v4.15.1_to.sql b/scripts/database/upgrades/upgrade_v4.15.1_to.sql new file mode 100644 index 00000000000..61741314699 --- /dev/null +++ b/scripts/database/upgrades/upgrade_v4.15.1_to.sql @@ -0,0 +1,10 @@ +ALTER TABLE setting ADD COLUMN lang text; + +UPDATE setting +SET lang = 'en'; + +ALTER TABLE ONLY setting + DROP CONSTRAINT setting_pkey ; + +ALTER TABLE ONLY setting + ADD CONSTRAINT setting_pkey PRIMARY KEY (name,lang); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 96cb347ff18..52badbedc71 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -156,6 +156,13 @@ public Response putSetting(@PathParam("name") String name, String content) { return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); } + @Path("settings/{name}/lang/{lang}") + @PUT + public Response putSetting(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + Setting s = settingsSvc.set(name, lang, content); + return ok("Setting " + name + " - " + lang + " - added."); + } + @Path("settings/{name}") @GET public Response getSetting(@PathParam("name") String name) { @@ -172,6 +179,13 @@ public Response deleteSetting(@PathParam("name") String name) { return ok("Setting " + name + " deleted."); } + @Path("settings/{name}/lang/{lang}") + @DELETE + public Response deleteSetting(@PathParam("name") String name, @PathParam("lang") String lang) { + settingsSvc.delete(name, lang); + return ok("Setting " + name + " - " + lang + " deleted."); + } + @Path("authenticationProviderFactories") @GET public Response listAuthProviderFactories() { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java index 80653770447..75ca9f13660 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java @@ -16,7 +16,12 @@ @NamedQuery( name="Setting.deleteByName", query="DELETE FROM Setting s WHERE s.name=:name"), @NamedQuery( name="Setting.findAll", - query="SELECT s FROM Setting s") + query="SELECT s FROM Setting s"), + @NamedQuery( name="Setting.deleteByNameAndLang", + query="DELETE FROM Setting s WHERE s.name=:name AND s.lang=:lang"), + @NamedQuery( name="Setting.findByNameAndLang", + query = "SELECT s FROM Setting s WHERE s.name=:name AND s.lang=:lang" ), + }) @Entity public class Setting implements Serializable { @@ -24,6 +29,10 @@ public class Setting implements Serializable { @Id private String name; + @Id + @Column(columnDefinition = "TEXT") + private String lang; + @Column(columnDefinition = "TEXT") private String content; @@ -31,8 +40,15 @@ public Setting() { } public Setting(String name, String content) { + this.name = name; + this.content = content; + this.lang 
= "en"; + } + + public Setting(String name, String lang, String content) { this.name = name; this.content = content; + this.lang = lang; } public String getName() { @@ -51,6 +67,14 @@ public void setContent(String content) { this.content = content; } + public String getLang() { + return lang; + } + + public void setLang(String lang) { + this.lang = lang; + } + @Override public int hashCode() { int hash = 7; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index c434a99f17f..bb9d850cbac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.api.ApiBlockingFilter; import edu.harvard.iq.dataverse.util.StringUtil; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; @@ -478,10 +479,26 @@ public String get( String name, String defaultValue ) { String val = get(name); return (val!=null) ? val : defaultValue; } + + public String get(String name, String lang, String defaultValue ) { + List tokens = em.createNamedQuery("Setting.findByNameAndLang", Setting.class) + .setParameter("name", name ) + .setParameter("lang", lang ) + .getResultList(); + String val = null; + if(tokens.size() > 0) { + val = tokens.get(0).getContent(); + } + return (val!=null) ? val : defaultValue; + } public String getValueForKey( Key key, String defaultValue ) { return get( key.toString(), defaultValue ); } + + public String getValueForKey( Key key, String lang, String defaultValue ) { + return get( key.toString(), lang, defaultValue ); + } public Setting set( String name, String content ) { Setting s = new Setting( name, content ); @@ -490,6 +507,14 @@ public Setting set( String name, String content ) { .setInfo(name + ": " + content)); return s; } + + public Setting set( String name, String lang, String content ) { + Setting s = new Setting( name, lang, content ); + em.merge(s); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Setting, "set") + .setInfo(name + ": " +lang + ": " + content)); + return s; + } public Setting setValueForKey( Key key, String content ) { return set( key.toString(), content ); @@ -526,6 +551,15 @@ public void delete( String name ) { .setParameter("name", name) .executeUpdate(); } + + public void delete( String name, String lang ) { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Setting, "delete") + .setInfo(name)); + em.createNamedQuery("Setting.deleteByNameAndLang") + .setParameter("name", name) + .setParameter("lang", lang) + .executeUpdate(); + } public Set listAll() { return new HashSet<>(em.createNamedQuery("Setting.findAll", Setting.class).getResultList()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b36c6e2bdec..f33fbb91b60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -489,8 +489,9 @@ public boolean isThumbnailGenerationDisabledForPDF() { } public String getApplicationTermsOfUse() { + String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); - String appTermsOfUse = 
settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse);
+        String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, saneDefaultForAppTermsOfUse);
         return appTermsOfUse;
     }
 

From 326d0dc8d65d879a74ba31171d42c3e354fbf4bf Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 11:53:02 -0400
Subject: [PATCH 02/94] Release Notes for 4.16

---
 doc/release-notes/4.16-release-notes | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 doc/release-notes/4.16-release-notes

diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes
new file mode 100644
index 00000000000..8b137891791
--- /dev/null
+++ b/doc/release-notes/4.16-release-notes
@@ -0,0 +1 @@
+

From 2f13eb4c3019723f78e9d22703d9e7e3a4597081 Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:10:38 -0400
Subject: [PATCH 03/94] 4.16 release notes initial commit, based off 4.15.1

---
 doc/release-notes/4.16-release-notes | 31 ++++++++++++++++++++++++++++
 1 file changed, 31 insertions(+)

diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes
index 8b137891791..cfaf495f7bd 100644
--- a/doc/release-notes/4.16-release-notes
+++ b/doc/release-notes/4.16-release-notes
@@ -1 +1,32 @@
+This release includes the following new features, enhancements, and bug features:
+yyy,
+yyy,
+and yyy.
+
+If you're running a Dataverse Installation, you should be aware of these changes:
+
+For the complete list of code changes in this release, see the 4.16 milestone in Github.
+
+For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org.
+
+## Installation:
+
+If this is a new installation, please see our Installation Guide.
+
+## Upgrade:
+
+1. Undeploy the previous version.
+
+ - <glassfish install path>/glassfish4/bin/asadmin list-applications
+ - <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse
+
+2. Stop glassfish and remove the generated directory, start
+ - service glassfish stop
+ - remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated
+ - service glassfish start
+
+3. Deploy this version.
+ - <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.16.war
+
+4. Restart glassfish

From b9827468452531f3a91b2ac8118c6095788fd9af Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:13:00 -0400
Subject: [PATCH 04/94] adding 5687 notes

---
 doc/release-notes/4.16-release-notes | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes
index cfaf495f7bd..f9140961b7a 100644
--- a/doc/release-notes/4.16-release-notes
+++ b/doc/release-notes/4.16-release-notes
@@ -4,7 +4,11 @@ This release includes the following new features, enhancements, and bug features
 yyy,
 and yyy.
 
-If you're running a Dataverse Installation, you should be aware of these changes:
+If you're running a Dataverse Installation, you should be aware of these things:
+
+ - In making the fix for xxx we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose:
+
+- delete from usernotification where date_part('year', senddate) < 2018;
 
 For the complete list of code changes in this release, see the 4.16 milestone in Github.

From 613a85392570ac1eafa9b4266726eafd55503ab9 Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:13:34 -0400
Subject: [PATCH 05/94] deleting the file for 5687 since I've moved it to 4.16 file

---
 doc/release-notes/5687-invalid-notifications.md | 6 ------
 1 file changed, 6 deletions(-)
 delete mode 100644 doc/release-notes/5687-invalid-notifications.md

diff --git a/doc/release-notes/5687-invalid-notifications.md b/doc/release-notes/5687-invalid-notifications.md
deleted file mode 100644
index 06dc3edd78f..00000000000
--- a/doc/release-notes/5687-invalid-notifications.md
+++ /dev/null
@@ -1,6 +0,0 @@
-The following needs to be added to the release notes and/or upgrade instructions:
-
-In making this fix we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose:
-
-delete from usernotification where date_part('year', senddate) < 2018;
-

From 0a1f2d0fbe2ab31bdcca5e06ef502de958113adf Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:18:56 -0400
Subject: [PATCH 06/94] adding 5766 and 5967 notes

---
 doc/release-notes/4.16-release-notes | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes
index f9140961b7a..0191580546c 100644
--- a/doc/release-notes/4.16-release-notes
+++ b/doc/release-notes/4.16-release-notes
@@ -7,8 +7,11 @@ and yyy.
 
 If you're running a Dataverse Installation, you should be aware of these things:
 
  - In making the fix for xxx we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose:
-
-- delete from usernotification where date_part('year', senddate) < 2018;
+delete from usernotification where date_part('year', senddate) < 2018;
+ - You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967).
+ - You should update citation metadata block and run ReExportall to remove an errant tab
+curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"
+Run ReExportall to update the citation metadata block to remove an errant tab: http://guides.dataverse.org/en/4.16/admin/metadataexport.html?highlight=export#batch-exports-through-the-api
 
 For the complete list of code changes in this release, see the 4.16 milestone in Github.

From 5cace5fb741cec7e86205c817d9f3d39a8a226b8 Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:19:41 -0400
Subject: [PATCH 07/94] Delete 5766-citation-tsv

---
 doc/release-notes/5766-citation-tsv | 4 ----
 1 file changed, 4 deletions(-)
 delete mode 100644 doc/release-notes/5766-citation-tsv

diff --git a/doc/release-notes/5766-citation-tsv b/doc/release-notes/5766-citation-tsv
deleted file mode 100644
index c9c04bd638f..00000000000
--- a/doc/release-notes/5766-citation-tsv
+++ /dev/null
@@ -1,4 +0,0 @@
-Update citation metadata block
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"
-
-Run ReExportall to update the citation metadata block to remove an errant tab: http://guides.dataverse.org/en/4.16/admin/metadataexport.html?highlight=export#batch-exports-through-the-api

From 0647dda8e4ec4f5bab2086adf46ede530a1b88c9 Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Thu, 15 Aug 2019 22:19:59 -0400
Subject: [PATCH 08/94] Delete 5967-custom-analytics-fix.md

---
 doc/release-notes/5967-custom-analytics-fix.md | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 doc/release-notes/5967-custom-analytics-fix.md

diff --git a/doc/release-notes/5967-custom-analytics-fix.md b/doc/release-notes/5967-custom-analytics-fix.md
deleted file mode 100644
index bd5ebb51f73..00000000000
--- a/doc/release-notes/5967-custom-analytics-fix.md
+++ /dev/null
@@ -1 +0,0 @@
-Update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967).
\ No newline at end of file

From 2c77702ea4382d967a1391608379c9393bf0c554 Mon Sep 17 00:00:00 2001
From: Danny Brooke
Date: Fri, 16 Aug 2019 13:30:33 -0400
Subject: [PATCH 09/94] stubbing out content

---
 doc/release-notes/4.16-release-notes | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes
index 0191580546c..6f36d2c7fbf 100644
--- a/doc/release-notes/4.16-release-notes
+++ b/doc/release-notes/4.16-release-notes
@@ -1,11 +1,18 @@
-This release includes the following new features, enhancements, and bug features:
+This release brings new features, enhancements, and bug fixes to Dataverse. The release highlights include:
-yyy,
-yyy,
+- Metrics view updates
+- Codebook
+- Lock improvements
+- On success
+
+- Researchers will now be able to download HTML Codebooks as an additional Dataset Export format. This pro
+The metrics view at both the Dataset and File level has been redesigned. layout has be,
 and yyy.
 
 If you're running a Dataverse Installation, you should be aware of these things:
 
+- Destroy behavior
+
  - In making the fix for xxx we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose:
 delete from usernotification where date_part('year', senddate) < 2018;
  - You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code.
We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). From e93937c5c70b23b6569cc987efc7ab0ee1489dd1 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Fri, 16 Aug 2019 13:49:16 -0400 Subject: [PATCH 10/94] more updates --- doc/release-notes/4.16-release-notes | 30 ++++++++++++++++------------ 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes index 6f36d2c7fbf..e4712defaf8 100644 --- a/doc/release-notes/4.16-release-notes +++ b/doc/release-notes/4.16-release-notes @@ -1,24 +1,28 @@ This release brings new features, enhancements, and bug fixes to Dataverse. The release highlights include: -- Metrics view updates -- Codebook -- Lock improvements -- On success +**Metrics Redesign** +The metrics view at both the Dataset and File level has been redesigned. The main driver of this redesign has been the expanded metrics (citations and views) provided through an integration with Make Data Count, but installations that do not adopt Make Data Count will also be able to take advantage of the new metrics view. Read more in the guides. -- Researchers will now be able to download HTML Codebooks as an additional Dataset Export format. This pro -The metrics view at both the Dataset and File level has been redesigned. layout has be, -and yyy. +**HTML Codebook Export** +Researchers will now be able to download HTML Codebooks as an additional Dataset Export format. This provides... Read more in the guides. If you're running a Dataverse Installation, you should be aware of these things: -- Destroy behavior +**Run ReExportall** +We made changes to the citation block in this release that will require installations to run ReExportall as part of the upgrade process. - - In making the fix for xxx we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose: +**Destroy Updates** +Destroying Datasets in Dataverse will now unregister/delete the PID for that Dataset. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. + +**Deleting Notifications** +In making the fix for #5687 we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose: delete from usernotification where date_part('year', senddate) < 2018; - - You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). 
- - You should update citation metadata block and run ReExportall to remove an errant tab -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values" -Run ReExportall to update the citation metadata block to remove an errant tab: http://guides.dataverse.org/en/4.16/admin/metadataexport.html?highlight=export#batch-exports-through-the-api + +**Lock Improvements** +A new type of Dataset lock implemented in 4.15 caused Read more in the guides. + +**Custom Analytics Code Changes** +You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). For the complete list of code changes in this release, see the 4.16 milestone in Github. From 287a1f2fe7770baa2c61626e1c95ad15fa90e500 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Fri, 16 Aug 2019 14:20:00 -0400 Subject: [PATCH 11/94] more changes, getting closer --- doc/release-notes/4.16-release-notes | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes index e4712defaf8..ddcd780d527 100644 --- a/doc/release-notes/4.16-release-notes +++ b/doc/release-notes/4.16-release-notes @@ -1,28 +1,29 @@ This release brings new features, enhancements, and bug fixes to Dataverse. The release highlights include: **Metrics Redesign** -The metrics view at both the Dataset and File level has been redesigned. The main driver of this redesign has been the expanded metrics (citations and views) provided through an integration with Make Data Count, but installations that do not adopt Make Data Count will also be able to take advantage of the new metrics view. Read more in the guides. +The metrics view at both the Dataset and File level has been redesigned. The main driver of this redesign has been the expanded metrics (citations and views) provided through an integration with Make Data Count, but installations that do not adopt Make Data Count will also be able to take advantage of the new metrics panel. **HTML Codebook Export** -Researchers will now be able to download HTML Codebooks as an additional Dataset Export format. This provides... Read more in the guides. +Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase resusability of the datasets in Dataverse. If you're running a Dataverse Installation, you should be aware of these things: **Run ReExportall** We made changes to the citation block in this release that will require installations to run ReExportall as part of the upgrade process. +**Custom Analytics Code Changes** +You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). 
+ **Destroy Updates** -Destroying Datasets in Dataverse will now unregister/delete the PID for that Dataset. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. +Destroying Datasets in Dataverse will now unregister/delete the PID with the PID provider. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. **Deleting Notifications** In making the fix for #5687 we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose: -delete from usernotification where date_part('year', senddate) < 2018; -**Lock Improvements** -A new type of Dataset lock implemented in 4.15 caused Read more in the guides. +`delete from usernotification where date_part('year', senddate) < 2018;` -**Custom Analytics Code Changes** -You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). +**Lock Improvements** +In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. For the complete list of code changes in this release, see the 4.16 milestone in Github. @@ -48,3 +49,9 @@ If this is a new installation, please see our Date: Mon, 19 Aug 2019 11:17:53 -0400 Subject: [PATCH 12/94] CORS fix --- .../java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java index 8f5b8333b8e..6fc0e488fe3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java @@ -159,6 +159,9 @@ public void doFilter(ServletRequest sr, ServletResponse sr1, FilterChain fc) thr } } try { + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Origin", "*"); + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Methods","PUT"); + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Headers", "Content-Type, X-Dataverse-Key"); fc.doFilter(sr, sr1); } catch ( ServletException se ) { logger.log(Level.WARNING, "Error processing " + requestURI +": " + se.getMessage(), se); From d4f9012b28dc7569250bea2df8abf27c0c31b0ef Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 19 Aug 2019 15:46:00 -0400 Subject: [PATCH 13/94] more info about sample data --- doc/sphinx-guides/source/developers/tips.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index 090a5192e3c..67374aa1c99 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -155,7 +155,7 @@ Sample Data You may want to populate your installation of Dataverse with sample data. 
You have a couple options: -- Code in https://github.com/IQSS/dataverse-sample-data (recommended). +- Code in https://github.com/IQSS/dataverse-sample-data (recommended). This set of sample data includes several common data types, data subsetted from production datasets in dataverse.harvard.edu, datasets with file hierarchy, and includes integrations with explore tools. - Scripts called from ``scripts/deploy/phoenix.dataverse.org/post``. ---- From 6fcba63014592c6d759516aec4174186099398e4 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 19 Aug 2019 15:50:25 -0400 Subject: [PATCH 14/94] add non-prod, bolding for importance --- doc/sphinx-guides/source/developers/tips.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index 67374aa1c99..0812c95f24b 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -153,7 +153,7 @@ On a Mac, you won't have git installed unless you have "Command Line Developer T Sample Data ----------- -You may want to populate your installation of Dataverse with sample data. You have a couple options: +You may want to populate your **non-production** installation(s) of Dataverse with sample data. You have a couple options: - Code in https://github.com/IQSS/dataverse-sample-data (recommended). This set of sample data includes several common data types, data subsetted from production datasets in dataverse.harvard.edu, datasets with file hierarchy, and includes integrations with explore tools. - Scripts called from ``scripts/deploy/phoenix.dataverse.org/post``. From 427b4131780ae1b31fba7f058324ebfae7998573 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 19 Aug 2019 15:59:48 -0400 Subject: [PATCH 15/94] updates from code review "and more" is more scalable! --- doc/sphinx-guides/source/developers/tips.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index 0812c95f24b..03a98f64269 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -155,7 +155,7 @@ Sample Data You may want to populate your **non-production** installation(s) of Dataverse with sample data. You have a couple options: -- Code in https://github.com/IQSS/dataverse-sample-data (recommended). This set of sample data includes several common data types, data subsetted from production datasets in dataverse.harvard.edu, datasets with file hierarchy, and includes integrations with explore tools. +- Code in https://github.com/IQSS/dataverse-sample-data (recommended). This set of sample data includes several common data types, data subsetted from production datasets in dataverse.harvard.edu, datasets with file hierarchy, and more. - Scripts called from ``scripts/deploy/phoenix.dataverse.org/post``. 
---- From 35000590852cbce17144d2bedfed52bff3e8c25c Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 19 Aug 2019 20:18:36 -0400 Subject: [PATCH 16/94] add harvesting note --- doc/release-notes/4.16-release-notes | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes index ddcd780d527..f47927c399e 100644 --- a/doc/release-notes/4.16-release-notes +++ b/doc/release-notes/4.16-release-notes @@ -25,6 +25,9 @@ In making the fix for #5687 we discovered that notifications created prior to 20 **Lock Improvements** In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. +**Harvesting Improvements** +We've updated the Harvesting code to better handle problematic records during incremental harvests. + For the complete list of code changes in this release, see the 4.16 milestone in Github. For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. From 89a249d18f7ad4f6bb519b553c749fe7b8fcf80f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 20 Aug 2019 12:29:35 -0400 Subject: [PATCH 17/94] add sample data to list of related projects #6103 --- doc/sphinx-guides/source/developers/intro.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index e82f44cae27..f5a970d772c 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -56,6 +56,7 @@ As a developer, you also may be interested in these projects related to Datavers - External Tools - add additional features to Dataverse: See the :doc:`/installation/external-tools` section of the Installation Guide. - Dataverse API client libraries - use Dataverse APIs from various languages: :doc:`/api/client-libraries` - DVUploader - a stand-alone command-line Java application that uses the Dataverse API to support upload of files from local disk to a Dataset: https://github.com/IQSS/dataverse-uploader +- dataverse-sample-data - populate your Dataverse installation with sample data: https://github.com/IQSS/dataverse-sample-data - dataverse-metrics - aggregate and visualize metrics for installations of Dataverse around the world: https://github.com/IQSS/dataverse-metrics - Configuration management scripts - Ansible, Puppet, etc.: See "Advanced Installation" in the :doc:`/installation/prep` section of the Installation Guide. 
- :doc:`/developers/unf/index` (Java) - a Universal Numerical Fingerprint: https://github.com/IQSS/UNF From 9075bb3627753692d6921f1142f3ebef5f257d4b Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Tue, 20 Aug 2019 12:39:24 -0400 Subject: [PATCH 18/94] Rename 4.16-release-notes to 4.16-release-notes.md --- doc/release-notes/{4.16-release-notes => 4.16-release-notes.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/release-notes/{4.16-release-notes => 4.16-release-notes.md} (100%) diff --git a/doc/release-notes/4.16-release-notes b/doc/release-notes/4.16-release-notes.md similarity index 100% rename from doc/release-notes/4.16-release-notes rename to doc/release-notes/4.16-release-notes.md From 4c87ec6cf4dc397a59e996c0d3627c473d67c1d5 Mon Sep 17 00:00:00 2001 From: j-n-c Date: Tue, 20 Aug 2019 17:47:03 +0100 Subject: [PATCH 19/94] #5665 Doc > Configuration > Auth Modes refactor --- doc/sphinx-guides/source/installation/config.rst | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 12e94e178a1..759001eb6a3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -185,17 +185,18 @@ There are three valid configurations or modes for authenticating users to Datave Out of the box, Dataverse is configured in "local only" mode. The "dataverseAdmin" superuser account mentioned in the :doc:`/installation/installation-main` section is an example of a local account. Internally, these accounts are called "builtin" because they are built in to the Dataverse application itself. -To configure Shibboleth see the :doc:`shibboleth` section and to configure OAuth see the :doc:`oauth2` section. - The ``authenticationproviderrow`` database table controls which "authentication providers" are available within Dataverse. Out of the box, a single row with an id of "builtin" will be present. For each user in Dataverse, the ``authenticateduserlookup`` table will have a value under ``authenticationproviderid`` that matches this id. For example, the default "dataverseAdmin" user will have the value "builtin" under ``authenticationproviderid``. Why is this important? Users are tied to a specific authentication provider but conversion mechanisms are available to switch a user from one authentication provider to the other. As explained in the :doc:`/user/account` section of the User Guide, a graphical workflow is provided for end users to convert from the "builtin" authentication provider to a remote provider. Conversion from a remote authentication provider to the builtin provider can be performed by a sysadmin with access to the "admin" API. See the :doc:`/api/native-api` section of the API Guide for how to list users and authentication providers as JSON. -Enabling a second authentication provider will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the ``:DefaultAuthProvider`` configuration option. +Adding and enabling a second authentication provider (``Add Authentication Provider`` and ``Enable or Disable an Authentication Provider`` from :doc:`/api/native-api`) will result in the Log In page showing additional providers for your users to choose from. 
By default, the Log In page will show the "builtin" provider, but you can adjust this via the ``:DefaultAuthProvider`` configuration option. Further customization can be achieved by setting ``:AllowSignUp`` (in :doc:`config`) to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint or scramble (or remove) the ``BuiltinUsers.KEY`` database setting per the :doc:`config` section. + +To configure Shibboleth see the :doc:`shibboleth` section and to configure OAuth see the :doc:`oauth2` section. -"Remote only" mode should be considered experimental until https://github.com/IQSS/dataverse/issues/2974 is resolved. For now, "remote only" means: +As for the "Remote only" authentication mode, it means that: - Shibboleth or OAuth has been enabled. -- ``:AllowSignUp`` is set to "false" per the :doc:`config` section to prevent users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint or scramble (or remove) the ``BuiltinUsers.KEY`` database setting per the :doc:`config` section. -- The "builtin" authentication provider has been disabled (:ref:`api_toggle_auth_provider`). Note that disabling the builting auth provider means that the API endpoint for converting an account from a remote auth provider will not work. This is the main reason why https://github.com/IQSS/dataverse/issues/2974 is still open. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to builtin. Then the user initiates a conversion from builtin to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. +- ``:AllowSignUp`` is set to "false" to prevent users from creating local accounts via the web interface. +- ``:DefaultAuthProvider`` has been set to use the desired authentication provider +- The "builtin" authentication provider has been disabled (:ref:`api_toggle_auth_provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. 
File Storage: Local Filesystem vs. Swift vs. S3 ----------------------------------------------- From 4c65c92126cd7e7e19332d723f8b5647446b6097 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 20 Aug 2019 15:07:18 -0400 Subject: [PATCH 20/94] wrap long lines in curl commands, etc #6086 --- doc/sphinx-guides/source/_static/docsdataverse_org.css | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/docsdataverse_org.css b/doc/sphinx-guides/source/_static/docsdataverse_org.css index 29b6a08ab59..7ed784e91e0 100755 --- a/doc/sphinx-guides/source/_static/docsdataverse_org.css +++ b/doc/sphinx-guides/source/_static/docsdataverse_org.css @@ -114,4 +114,8 @@ div.form-group .glyphicon.glyphicon-asterisk {font-size: .5em; vertical-align: t /* #sidebar.bs-sidenav.affix { position: static; -} REMOVED STATIC ToC */ \ No newline at end of file +} REMOVED STATIC ToC */ + +pre { + white-space: pre-wrap; +} From 39cd360277f13a1f77dd27cf97f53296ee491487 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 20 Aug 2019 15:07:57 -0400 Subject: [PATCH 21/94] add beginner friendly API documentation #6086 --- doc/sphinx-guides/source/admin/apis.rst | 0 .../source/admin/troubleshooting.rst | 26 +- doc/sphinx-guides/source/api/apps.rst | 86 +++++-- doc/sphinx-guides/source/api/auth.rst | 63 +++++ doc/sphinx-guides/source/api/faq.rst | 95 ++++++++ .../source/api/getting-started.rst | 146 +++++++++++ doc/sphinx-guides/source/api/index.rst | 6 +- doc/sphinx-guides/source/api/intro.rst | 229 ++++++++++++++++-- doc/sphinx-guides/source/api/native-api.rst | 185 ++++++++++++-- .../source/installation/oauth2.rst | 2 + .../source/installation/shibboleth.rst | 2 + doc/sphinx-guides/source/user/account.rst | 32 ++- 12 files changed, 804 insertions(+), 68 deletions(-) create mode 100644 doc/sphinx-guides/source/admin/apis.rst create mode 100644 doc/sphinx-guides/source/api/auth.rst create mode 100644 doc/sphinx-guides/source/api/faq.rst create mode 100644 doc/sphinx-guides/source/api/getting-started.rst diff --git a/doc/sphinx-guides/source/admin/apis.rst b/doc/sphinx-guides/source/admin/apis.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst index 3e8cfbfa62f..1b22a58555b 100644 --- a/doc/sphinx-guides/source/admin/troubleshooting.rst +++ b/doc/sphinx-guides/source/admin/troubleshooting.rst @@ -3,11 +3,35 @@ Troubleshooting =============== -This new (as of v.4.6) section of the Admin guide is for tips on how to diagnose and fix system problems. +Sometimes Dataverse users get into trouble. Sometimes Dataverse itself gets into trouble. If something has gone wrong, this section is for you. .. contents:: Contents: :local: +Using Dataverse APIs to Troubleshoot and Fix Problems +----------------------------------------------------- + +See the :doc:`/api/intro` section of the API Guide for a high level overview of Dataverse APIs. Below are listed problems that support teams might encounter that can be handled via API (sometimes only via API). + +A Dataset Is Locked And Cannot Be Edited or Published +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +It's normal for the ingest process described in the :doc:`/user/tabulardataingest/ingestprocess` section of the User Guide to take some time but if hours or days have passed and the dataset is still locked, you might want to inspect the locks and consider deleting some or all of them. 
+ +See :doc:`dataverses-datasets`. + +Someone Created Spam Datasets and I Need to Delete Them +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Depending on how open your installation of Dataverse is to the general public creating datasets, you may sometimes need to deal with spam datasets. + +Look for "destroy" in the :doc:`/api/native-api` section of the API Guide. + +A User Needs Their Account to Be Converted From Institutional (Shibboleth), ORCID, Google, or GitHub to Something Else +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`converting-shibboleth-users-to-local` and :ref:`converting-oauth-users-to-local`. + Glassfish --------- diff --git a/doc/sphinx-guides/source/api/apps.rst b/doc/sphinx-guides/source/api/apps.rst index 5e26261ca71..f9d8d4c9b02 100755 --- a/doc/sphinx-guides/source/api/apps.rst +++ b/doc/sphinx-guides/source/api/apps.rst @@ -1,9 +1,9 @@ Apps ==== -The introduction of Dataverse APIs has fostered the development of apps that are listed at http://dataverse.org/integrations and the :doc:`/admin/integrations` section of the Admin Guide. +The introduction of Dataverse APIs has fostered the development of a variety of software applications that are listed in the :doc:`/admin/integrations` and :doc:`/admin/reporting-tools` sections of the Admin Guide and the :doc:`/installation/external-tools` section of the Installation Guide. -The apps below are open source, demonstrating how to use Dataverse APIs. Some of these apps (and others) are built on :doc:`/api/client-libraries` that are available for Dataverse APIs. +The apps below are open source and demonstrate how to use Dataverse APIs. Some of these apps are built on :doc:`/api/client-libraries` that are available for Dataverse APIs in Python, R, and Java. .. contents:: |toctitle| :local: @@ -11,6 +11,27 @@ The apps below are open source, demonstrating how to use Dataverse APIs. Some of Javascript ---------- +Data Explorer +~~~~~~~~~~~~~ + +Data Explorer is a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. + +https://github.com/scholarsportal/Dataverse-Data-Explorer + +Data Curation Tool +~~~~~~~~~~~~~~~~~~ + +Data Curation Tool is a GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. + +https://github.com/scholarsportal/Dataverse-Data-Curation-Tool + +File Previewers +~~~~~~~~~~~~~~~ + +File Previewers are tools that display the content of files - including audio, html, Hypothes.is annotations, images, PDF, text, video - allowing them to be viewed without downloading. + +https://github.com/QualitativeDataRepository/dataverse-previewers + TwoRavens ~~~~~~~~~ @@ -18,23 +39,31 @@ TwoRavens is a system of interlocking statistical tools for data exploration, an https://github.com/IQSS/TwoRavens -PHP ---- +Python +------ -OJS -~~~ +Please note that there are multiple Python modules for Dataverse APIs listed in the :doc:`client-libraries` section. -The Open Journal Systems (OJS) Dataverse Plugin adds data sharing and preservation to the OJS publication process. +dataverse-sample-data +~~~~~~~~~~~~~~~~~~~~~ -https://github.com/pkp/ojs/tree/ojs-stable-2_4_8/plugins/generic/dataverse +dataverse-sample-data allows you to populate your Dataverse installation with sample data. It makes uses of pyDataverse, which is listed in the :doc:`client-libraries` section. 
-Python ------- +https://github.com/IQSS/dataverse-sample-data + +Texas Digital Library dataverse-reports +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Dataverse Reports for Texas Digital Library generates and emails statistical reports for an installation of Dataverse using the native API and database queries. + +https://github.com/TexasDigitalLibrary/dataverse-reports OSF ~~~ -Allows you to view, download, and upload files to and from a Dataverse dataset from an Open Science Framework (OSF) project: https://github.com/CenterForOpenScience/osf.io/tree/develop/addons/dataverse +OSF allows you to view, download, and upload files to and from a Dataverse dataset from an Open Science Framework (OSF) project. + +https://github.com/CenterForOpenScience/osf.io/tree/develop/addons/dataverse GeoConnect ~~~~~~~~~~ @@ -46,22 +75,49 @@ https://github.com/IQSS/geoconnect dataverse-metrics ~~~~~~~~~~~~~~~~~ -dataverse-metrics aggregates and visualizes metrics across multiple Dataverse installations but can also be used with a single installation: https://github.com/IQSS/dataverse-metrics +dataverse-metrics aggregates and visualizes metrics across multiple Dataverse installations but can also be used with a single installation + +https://github.com/IQSS/dataverse-metrics + +Whole Tale +~~~~~~~~~~ + +Whole Tale enables researchers to analyze data using popular tools including Jupyter and RStudio with the ultimate goal of supporting publishing of reproducible research packages. + +https://github.com/whole-tale/girder_wholetale/tree/v0.7/server/lib/dataverse + +Archivematica +~~~~~~~~~~~~~ + +Archivematica is an integrated suite of open-source tools for processing digital objects for long-term preservation. + +https://github.com/artefactual/archivematica/tree/v1.9.2/src/MCPClient/lib/clientScripts Java ---- +Please note that there is a Java library for Dataverse APIs listed in the :doc:`client-libraries` section. + DVUploader -~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~ The open-source DVUploader tool is a stand-alone command-line Java application that uses the Dataverse API to upload files to a specified Dataset. Files can be specified by name, or the DVUploader can upload all files in a directory or recursively from a directory tree. The DVUploader can also verify that uploaded files match their local sources by comparing the local and remote fixity checksums. Source code, release 1.0.0- jar file, and documentation are available on GitHub. DVUploader's creation was supported by the Texas Digital Library. https://github.com/IQSS/dataverse-uploader - Dataverse for Android ~~~~~~~~~~~~~~~~~~~~~ -For now this is only a proof of concept. +Dataverse for Android makes use of Dataverse's Search API. https://github.com/IQSS/dataverse-android + +PHP +--- + +OJS +~~~ + +The Open Journal Systems (OJS) Dataverse Plugin adds data sharing and preservation to the OJS publication process. + +https://github.com/pkp/ojs/tree/ojs-stable-2_4_8/plugins/generic/dataverse diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst new file mode 100644 index 00000000000..21e38424549 --- /dev/null +++ b/doc/sphinx-guides/source/api/auth.rst @@ -0,0 +1,63 @@ +API Tokens and Authentication +============================= + +An API token is similar to a password and allows you to authenticate to Dataverse APIs to perform actions as you. Many Dataverse APIs require the use of an API token. + +.. 
contents:: |toctitle| + :local: + +How to Get an API Token +----------------------- + +Your API token is unique to the server you are using. You cannot use your API token from one server on another server. + +Instructions for getting a token are described in the :doc:`/user/account` section of the User Guide. + +How Your API Token Is Like a Password +------------------------------------- + +Anyone who has your API Token can add and delete data as you so you should treat it with the same care as a password. + +Passing Your API Token as an HTTP Header (Preferred) or a Query Parameter +------------------------------------------------------------------------- + +See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +There are two ways to pass your API token to Dataverse APIs. The preferred method is to send the token in the ``X-Dataverse-key`` HTTP header, as in the following curl example. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ALIAS/contents + +Here's how it looks without the environment variables: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/contents + +The second way to pass your API token is via a query parameter called ``key`` in the URL like below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=root + + curl $SERVER_URL/api/dataverses/$ALIAS/contents?key=$API_TOKEN + +Here's how it looks without the environment variables: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/dataverses/root/contents?key=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + +Use of the ``X-Dataverse-key`` HTTP header form is preferred to passing ``key`` in the URL because query parameters like ``key`` appear in URLs and might accidentally get shared, exposing your API token. (Again it's like a password.) Additionally, URLs are often logged on servers while it's less common to log HTTP headers. + +Resetting Your API Token +------------------------ + +You can reset your API Token from your account page in Dataverse as described in the :doc:`/user/account` section of the User Guide. diff --git a/doc/sphinx-guides/source/api/faq.rst b/doc/sphinx-guides/source/api/faq.rst new file mode 100644 index 00000000000..0f0d71d775b --- /dev/null +++ b/doc/sphinx-guides/source/api/faq.rst @@ -0,0 +1,95 @@ +Frequently Asked Questions +========================== + +APIs are less intuitive than graphical user interfaces (GUIs) so questions are expected! + +.. contents:: |toctitle| + :local: + +What is an API? +--------------- + +See "What is an API?" in the :doc:`intro` section. + +What Are Common Use Cases for Dataverse APIs? +--------------------------------------------- + +See the :doc:`getting-started` section for common use cases for researchers and curators. Other types of API users should find starting points at :ref:`types-of-api-users`. + +Where Can I Find Examples of Using Dataverse APIs? +-------------------------------------------------- + +See the :doc:`getting-started` section links to examples using curl. + +For examples in Javascript, Python, R, and Java, and PHP, see the :doc:`apps` and :doc:`client-libraries` sections. + +When Should I Use the Native API vs. the SWORD API? 
+--------------------------------------------------- + +The :doc:`sword` is based on a standard, works fine, and is fully supported, but much more development effort has been going into the :doc:`native-api`, which is not based on a standard. It is specific to Dataverse. + +SWORD uses XML. The Native API uses JSON. + +SWORD only supports a dozen or so operations. The Native API supports many more. + +To Operate on a Dataset Should I Use Its DOI (or Handle) or Its Database ID? +---------------------------------------------------------------------------- + +It's fine to target a datasets using either its Persistent ID (PID such as DOI or Handle) or its database id. + +Here's an example from :ref:`publish-dataset-api` of targeting a dataset using its DOI: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST "https://demo.dataverse.org/api/datasets/:persistentId/actions/:publish?persistentId=doi:10.5072/FK2/J8SJZB&type=major" + +You can target the same dataset with its database ID ("42" in the example below), like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST "https://demo.dataverse.org/api/datasets/42/actions/:publish?type=major" + +Note that when multiple query parameters are used (such as ``persistentId`` and ``type`` above) there is a question mark (``?``) before the first query parameter and ampersands (``&``) before each of the subsequent query parameters. Also, ``&`` has special meaning in Unix shells such as Bash so you must put quotes around the entire URL. + +Where is the Comprehensive List of All API Functionality? +--------------------------------------------------------- + +There are so many Dataverse APIs that a single page in this guide would probably be overwhelming. See :ref:`list-of-dataverse-apis` for links to various pages. + +It's possible to get a complete list of API functionality in Swagger/OpenAPI format if you deploy Dataverse to Payara 5+. For details, see https://github.com/IQSS/dataverse/issues/5794 + +Is There a Changelog of API Functionality That Has Been Added Over Time? +------------------------------------------------------------------------ + +No, but there probably should be. If you have suggestions for how it should look, please create an issue at https://github.com/IQSS/dataverse/issues + +.. _no-api: + +What Funtionality is GUI Only and Not Available Via API +------------------------------------------------------- + +The following tasks cannot currently be automated via API because no API exists for them. The web interface should be used instead for these GUI-only features: + +- Setting a logo image, URL, and tagline when creating a dataverse. +- Editing properties of an existing dataverse. +- Set "Enable Access Request" for Terms of Use: https://groups.google.com/d/msg/dataverse-community/oKdesT9rFGc/qM6wrsnnBAAJ +- Downloading a guestbook. +- Set guestbook_id for a dataset: https://groups.google.com/d/msg/dataverse-community/oKdesT9rFGc/qM6wrsnnBAAJ +- Filling out a guestbook. See also https://groups.google.com/d/msg/dataverse-dev/G9FNGP_bT0w/dgE2Fk4iBQAJ +- Seeing why a file failed ingest. +- Dataset templates. +- Deaccessioning datasets. + +If you would like APIs for any of the features above, please open a GitHub issue at https://github.com/IQSS/dataverse/issues + +You are also welcome to open an issue to add to the list above. Or you are welcome to make a pull request. 
Please see the :doc:`/developers/documentation` section of the Developer Guide for instructions. + +Why Aren't the Return Values (HTTP Status Codes) Documented? +------------------------------------------------------------ + +They should be. Please consider making a pull request to help. The :doc:`/developers/documentation` section of the Developer Guide should help you get started. :ref:`create-dataverse-api` has an example you can follow, or you can come up with a better way. + +What If My Question Isn't Answered Here? +---------------------------------------- + +Please ask! For information on where to ask, please see :ref:`getting-help-with-apis`. diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst new file mode 100644 index 00000000000..a1e957de24f --- /dev/null +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -0,0 +1,146 @@ +Getting Started with APIs +========================= + +If you are a researcher or curator who wants to automate parts of your workflow, this section should help you get started. The :doc:`intro` section lists resources for other groups who may be interested in Dataverse APIs, such as developers of integrations and support teams. + +.. contents:: |toctitle| + :local: + +Servers You Can Test With +------------------------- + +Rather than using a production installation of Dataverse, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. + +If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. + +Getting an API Token +-------------------- + +Many Dataverse APIs require an API token. + +Once you have identified a server to test with, create an account, click on your name, and get your API token. For more details, see the :doc:`auth` section. + +.. _curl-examples-and-environment-variables: + +curl Examples and Environment Variables +---------------------------------------- + +The examples in this guide use `curl`_ for the following reasons: + +- curl commands are succinct. +- curl commands can be copied and pasted into a terminal. +- This guide is programming language agnostic and doesn't favor any particular programming language. + +You'll find curl examples that look like this: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export QUERY=data + + curl $SERVER_URL/api/search?q=$QUERY + +What's going on above is the declaration of "environment variables" that are substituted into a curl command. You should run the "export" commands, changing the values for the server URL, the query, or whatever options the command supports. Then you should be able to copy and paste the curl command and it should "just work", substituting the variables like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/search?q=data + +If you ever want to check an environment variable, you can "echo" it like this: + +.. code-block:: bash + + echo $SERVER_URL + +If you don't like curl, don't have curl, or want to use a different programming language, you are encouraged to check out the Python, R, and Java options in the :doc:`client-libraries` section. + +.. _curl: https://curl.haxx.se + +Depositing Data +--------------- + +Creating a Dataverse +~~~~~~~~~~~~~~~~~~~~ + +See :ref:`create-dataverse-api`.
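+ +If you want a quick taste of what this looks like, below is a minimal sketch of creating a dataverse under the "root" dataverse, assuming you have prepared a ``dataverse-complete.json`` file as described in :ref:`create-dataverse-api` (the token and server below are placeholders): + +.. code-block:: bash + + # Placeholder values; substitute your own token, server, and JSON file. + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/root --upload-file dataverse-complete.json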
+ +Creating a Dataset +~~~~~~~~~~~~~~~~~~ + +See :ref:`create-dataset-command`. + +Uploading Files +~~~~~~~~~~~~~~~ + +See :ref:`add-file-api`. + +Publishing a Dataverse +~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`publish-dataverse-api`. + +Publishing a Dataset +~~~~~~~~~~~~~~~~~~~~ + +See :ref:`publish-dataset-api`. + +Finding and Downloading Data +---------------------------- + +Finding Datasets +~~~~~~~~~~~~~~~~ + +A quick example search for the word "data" is https://demo.dataverse.org/api/search?q=data + +See the :doc:`search` section for details. + +Downloading Files +~~~~~~~~~~~~~~~~~ + +The :doc:`dataaccess` section explains how to download files. + +In order to download files, you must know their database IDs, which you can get from the ``dataverse_json`` metadata at the dataset level. See :ref:`export-dataset-metadata-api`. + +Downloading Metadata +~~~~~~~~~~~~~~~~~~~~ + +Dataset metadata is available in a variety of formats listed at :ref:`metadata-export-formats`. + +See :ref:`export-dataset-metadata-api`. + +Listing the Contents of a Dataverse +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`show-contents-of-a-dataverse-api`. + +Managing Permissions +-------------------- + +Granting Permission +~~~~~~~~~~~~~~~~~~~ + +See :ref:`assign-role-on-a-dataverse-api`. + +Revoking Permission +~~~~~~~~~~~~~~~~~~~ + +See :ref:`revoke-role-on-a-dataverse-api`. + +Listing Permissions (Role Assignments) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`list-role-assignments-on-a-dataverse-api`. + +Beyond "Getting Started" Tasks +------------------------------ + +In addition to the tasks listed above, Dataverse supports many other operations via API. + +See :ref:`list-of-dataverse-apis` and :ref:`types-of-api-users` to get oriented. + +If you're looking for inspiration for how you can use Dataverse APIs, there are open source projects that integrate with Dataverse listed in the :doc:`apps` section. + +Getting Help +------------- + +See :ref:`getting-help-with-apis`. diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index dd70e871bd0..e70c369eeeb 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -10,11 +10,15 @@ API Guide .. toctree:: + new intro - sword + getting-started + auth search dataaccess native-api metrics + sword client-libraries apps + faq diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index 6e0e1a9e0d7..fce2824faa4 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -1,52 +1,235 @@ Introduction ============ -We encourage anyone interested in building tools that interoperate with Dataverse to utilize our APIs. The Dataverse community has supplied :doc:`client-libraries` for Python, R, and Java and we are always interested in helping the community develop libraries for additional languages. The :doc:`apps` section links to open source Javascript, PHP, Python, and Java code that you can learn from while developing against Dataverse APIs. +Dataverse APIs allow users to accomplish many tasks such as... + +- creating datasets +- uploading files +- publishing datasets +- and much, much more + +... all without using the Dataverse web interface. + +APIs open the door for integrations between Dataverse and other software. For a list, see the :doc:`/admin/integrations` section of the Admin Guide. .. contents:: |toctitle| :local: +What is an API?
+--------------- + +API stands for "Application Programming Interface", and an example is Dataverse's "file upload" API. In the diagram below, we can see that while users can click a button within Dataverse's web interface to upload a file, there are many other ways to get files into Dataverse, all using an API for uploading files. + +.. graphviz:: + + digraph { + //rankdir="LR"; + node [fontsize=10] + + browser [label="Web Browser"] + terminal [label="Terminal"] + + osf [label="OSF",shape=box] + ojs [label="OJS",shape=box] + rspace [label="RSpace",shape=box] + uploader [label="DvUploader"] + script [label="Script\n(Python,\nR, etc.)"] + + addfilebutton [label="Add File Button"] + addfileapi [label="Add File API"] + storage [label="Storage",shape=box3d] + + terminal -> script + terminal -> uploader + + browser -> ojs + browser -> osf + browser -> rspace + browser -> addfilebutton + + uploader -> addfileapi + ojs -> addfileapi + osf -> addfileapi + rspace -> addfileapi + script -> addfileapi + + subgraph cluster_dataverse { + label="Dataverse" + labeljust="r" + labelloc="b" + addfilebutton -> storage + addfileapi -> storage + } + } + +The components above that use the file upload API are: + +- DVUploader is a terminal-based application for uploading files and is described in the :doc:`/user/dataset-management` section of the User Guide. +- OJS, OSF, and RSpace are all web applications that can integrate with Dataverse and are described in "Getting Data In" in the :doc:`/admin/integrations` section of the Admin Guide. +- The script in the diagram can be as simple as a single line of code that is run in a terminal. You can copy and paste "one-liners" like this from the guide. See the :doc:`getting-started` section for examples using a tool called "curl". + +The diagram above shows only a few examples of software using a specific API, but many more APIs are available. + +.. _types-of-api-users: + +Types of Dataverse API Users +---------------------------- + +This guide is intended to serve multiple audiences, but pointers to various sections of the guide are provided below based on the type of API user you are. + +API Users Within a Single Installation of Dataverse +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Each installation of Dataverse will have its own groups of people interested in APIs. + +Users of Integrations and Apps +++++++++++++++++++++++++++++++++ + +Integrations and apps can take many forms, but two examples are: + +- Using Open Science Framework (OSF), a web application, to deposit and publish data into Dataverse. +- Using DVUploader, a terminal-based desktop application, to upload files into Dataverse. + +In both examples, users need to obtain an API Token to authenticate with Dataverse. + +|Start| A good starting point is "API Tokens" in the :doc:`/user/account` section of the User Guide. DVUploader is documented in the :doc:`/user/dataset-management` section of the User Guide. The integrations that are enabled depend on your installation of Dataverse. You can find a list in the :doc:`/admin/integrations` section of the Admin Guide. + +Power Users +++++++++++++ + +Power users may be researchers or curators who are comfortable with automating parts of their workflow by writing Python code or similar. + +|Start| The recommended starting point for power users is the :doc:`getting-started` section.
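+ +For example, automation can start as small as a single curl "one-liner" against the Search API (a minimal sketch using the demo server and the same example query shown in the :doc:`getting-started` section; no API token is needed for published data): + +.. code-block:: bash + + # Search the demo server for the word "data"; adjust the server and query to your needs. + curl https://demo.dataverse.org/api/search?q=data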
+ +Support Teams and Superusers ++++++++++++++++++++++++++++++ + +Support teams that answer questions about their installation of Dataverse should familiarize themselves with the :doc:`getting-started` section to get a sense of common tasks that researchers and curators might be trying to accomplish by using Dataverse APIs. + +Superusers of an installation of Dataverse have access to a superuser dashboard described in the :doc:`/admin/dashboard` section of the Admin Guide, but some operations can only be done via API. + +|Start| A good starting point for both groups is the :doc:`getting-started` section of this guide, followed by the :doc:`/admin/troubleshooting` section of the Admin Guide. + +Sysadmins ++++++++++ + +Sysadmins often write scripts to automate tasks, and Dataverse APIs make this possible. Sysadmins have control over the server that Dataverse is running on and may be called upon to execute API commands that are limited to "localhost" (the server itself) for security reasons. + +|Start| A good starting point for sysadmins is "Blocking API Endpoints" in the :doc:`/installation/config` section of the Installation Guide, followed by the :doc:`getting-started` section of this guide, followed by the :doc:`/admin/troubleshooting` section of the Admin Guide. + +In-House Developers ++++++++++++++++++++ + +Some organizations that run Dataverse employ developers who use Dataverse APIs for specific tasks, such as building custom integrations with in-house systems or creating reports specific to the organization's needs. + +|Start| A good starting point for in-house developers is the :doc:`getting-started` section. + +API Users Across the Dataverse Project +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Dataverse project loves contributors! Depending on your interests and skills, you might fall into one or more of the groups below. + +Developers of Integrations, External Tools, and Apps +++++++++++++++++++++++++++++++++++++++++++++++++++++ + +One of the primary purposes of Dataverse APIs is to enable integrations with third-party software. Integrations are listed in the following places: + +- The :doc:`/admin/integrations` section of the Admin Guide. +- The :doc:`/installation/external-tools` section of the Installation Guide. +- The :doc:`apps` section of this guide. + +|Start| Good starting points are the three sections above to get a sense of third-party software that already integrates with Dataverse, followed by the :doc:`getting-started` section. + +Developers of Dataverse API Client Libraries +++++++++++++++++++++++++++++++++++++++++++++ + +A client library helps developers using a specific programming language such as Python, R, or Java interact with Dataverse APIs in a manner that is idiomatic for their language. For example, a Python programmer may want to create datasets and upload files from Python code without having to construct HTTP requests by hand. + +|Start| A good starting point is the :doc:`client-libraries` section, followed by the :doc:`getting-started` section. + +Developers of Dataverse Itself +++++++++++++++++++++++++++++++ + +Developers working on Dataverse itself use Dataverse APIs when adding features, fixing bugs, and testing those features and bug fixes. + +|Start| A good starting point is the :doc:`/developers/testing` section of the Developer Guide. + +..
|Start| raw:: html + + + Starting point +   + How This Guide is Organized --------------------------- -We document the Dataverse API in five sections: +Getting Started +~~~~~~~~~~~~~~~ + +See :doc:`getting-started` + +API Tokens and Authentication +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :doc:`auth`. + +.. _list-of-dataverse-apis: + +Lists of Dataverse APIs +~~~~~~~~~~~~~~~~~~~~~~~ -- :doc:`sword`: For depositing data using a standards-based approach rather than the :doc:`native-api`. - :doc:`search`: For searching dataverses, datasets, and files. - :doc:`dataaccess`: For downloading and subsetting data. -- :doc:`native-api`: For performing most tasks that are possible in the GUI. +- :doc:`native-api`: For performing most tasks that are possible in the GUI. See :doc:`getting-started` for the most common commands which operate on endpoints with names like: + + - Dataverses + - Datasets + - Files + - etc. + - :doc:`metrics`: For query statisics about usage of a Dataverse installation. +- :doc:`sword`: For depositing data using a standards-based approach rather than the :doc:`native-api`. + +Please note that some APIs are only documented in other guides that are more suited to their audience: -We use the term "native" to mean that the API is not based on any standard. For this reason, the :doc:`search` and :doc:`dataaccess` could also be considered "native" and in the future we may reorganize the API Guide to split the :doc:`native-api` section into "Datasets API", "Files" API, etc. +- Admin Guide -Authentication --------------- + - :doc:`/admin/metadatacustomization` + - :doc:`/admin/metadataexport` + - :doc:`/admin/make-data-count` + - :doc:`/admin/geoconnect-worldmap` + - :doc:`/admin/solr-search-index` -Most Dataverse APIs require the use of an API token. (In code we sometimes call it a "key" because it's shorter.) Instructions for getting a token are described in the :doc:`/user/account` section of the User Guide. +- Installation Guide -There are two ways to pass your API token to Dataverse APIs. The preferred method is to send the token in the ``X-Dataverse-key`` HTTP header, as in the following curl example:: + - :doc:`/installation/config` + - :doc:`/installation/external-tools` - curl -H "X-Dataverse-key: 8b955f87-e49a-4462-945c-67d32e391e7e" https://demo.dataverse.org/api/datasets/:persistentId?persistentId=doi:TEST/12345 +Client Libraries +~~~~~~~~~~~~~~~~ -Throughout this guide you will often see Bash shell envionmental variables being used, like this:: +See :doc:`client-libraries` for how to use Dataverse APIs from Python, R, and Java. - export API_TOKEN='8b955f87-e49a-4462-945c-67d32e391e7e' - curl -H "X-Dataverse-key: $API_TOKEN" https://demo.dataverse.org/api/datasets/:persistentId?persistentId=doi:TEST/12345 +Examples +~~~~~~~~ -The second way to pass your API token is via an extra query parameter called ``key`` in the URL like this:: +:doc:`apps` links to example open source code you can study. :doc:`getting-started` also has many examples. - curl "https://demo.dataverse.org/api/datasets/:persistentId?persistentId=doi:TEST/12345&key=$API_TOKEN" +Frequently Asked Questions +~~~~~~~~~~~~~~~~~~~~~~~~~~ -Use of the ``X-Dataverse-key`` HTTP header form is preferred because putting the query parameters in URLs often results in them finding their way into web server access logs. Your API token should be kept as secret as your password because it can be used to perform any action *as you* in the Dataverse application. +See :doc:`faq`. -Testing -------- +.. 
_getting-help-with-apis: -Rather than using a production installation of Dataverse, API users are welcome to use http://demo.dataverse.org for testing. +Getting Help +------------ -Support -------- +Dataverse API questions are on topic in all the usual places: -If you are using the APIs for an installation of Dataverse hosted by your institution, you may want to reach out to the team that supports it. In the header at the top of the site, there should be a form you can fill out by clicking the "Support" link. +- The dataverse-community Google Group: https://groups.google.com/forum/#!forum/dataverse-community +- Dataverse community calls: https://dataverse.org/community-calls +- The Dataverse chat room: http://chat.dataverse.org +- The Dataverse ticketing system: support@dataverse.org -If you are having trouble with http://demo.dataverse.org or have questions about the APIs, please feel free to reach out to the Dataverse community via https://groups.google.com/forum/#!forum/dataverse-community . +After your question has been answered, you are welcome to help improve the :doc:`faq` section of this guide. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c5bee4e250c..382c84191ee 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -5,7 +5,7 @@ Dataverse 4 exposes most of its GUI functionality via a REST-based API. This sec .. note:: |CORS| Some API endpoint allow CORS_ (cross-origin resource sharing), which makes them usable from scripts runing in web browsers. These endpoints are marked with a *CORS* badge. -.. note:: Bash environment variables shown below. The idea is that you can "export" these environment variables before copying and pasting the commands that use them. For example, you can set ``$SERVER_URL`` by running ``export SERVER_URL="https://demo.dataverse.org"`` in your Bash shell. To check if the environment variable was set properly, you can "echo" it (e.g. ``echo $SERVER_URL``). +.. note:: Bash environment variables shown below. The idea is that you can "export" these environment variables before copying and pasting the commands that use them. For example, you can set ``$SERVER_URL`` by running ``export SERVER_URL="https://demo.dataverse.org"`` in your Bash shell. To check if the environment variable was set properly, you can "echo" it (e.g. ``echo $SERVER_URL``). See also :ref:`curl-examples-and-environment-variables`. .. _CORS: https://www.w3.org/TR/cors/ @@ -17,17 +17,20 @@ Dataverse 4 exposes most of its GUI functionality via a REST-based API. This sec Dataverses ---------- +.. _create-dataverse-api: + Create a Dataverse ~~~~~~~~~~~~~~~~~~ -Generates a new dataverse under ``$id``. Expects a JSON content describing the dataverse, as in the example below. -If ``$id`` is omitted, a root dataverse is created. ``$id`` can either be a dataverse id (long) or a dataverse alias (more robust). In the example below, "root" is the id, which means that the dataverse will be created as a child of the root dataverse:: +A dataverse is a container for datasets and other dataverses as explained in the :doc:`/user/dataverse-management` section of the User Guide. -``export id=root` +The steps for creating a dataverse are: -``curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/dataverses/$id --upload-file dataverse-complete.json`` +- Prepare a JSON file containing the name, description, etc, of the dataverse you'd like to create. 
+- Figure out the alias or database id of the "parent" dataverse into which you will be creating your new dataverse. +- Execute a curl command or equivalent. -Download the :download:`JSON example <../_static/api/dataverse-complete.json>` file and modified to create dataverses to suit your needs. The fields ``name``, ``alias``, and ``dataverseContacts`` are required. The controlled vocabulary for ``dataverseType`` is +Download :download:`dataverse-complete.json <../_static/api/dataverse-complete.json>` file and modify it to suit your needs. The fields ``name``, ``alias``, and ``dataverseContacts`` are required. The controlled vocabulary for ``dataverseType`` is the following: - ``DEPARTMENT`` - ``JOURNALS`` @@ -41,6 +44,28 @@ Download the :download:`JSON example <../_static/api/dataverse-complete.json>` f .. literalinclude:: ../_static/api/dataverse-complete.json +The curl command below assumes you have kept the name "dataverse-complete.json" and that this file is in your current working directory. + +Next you need to figure out the alias or database id of the "parent" dataverse into which you will be creating your new dataverse. Out of the box the top level dataverse has an alias of "root" and a database id of "1" but your installation may vary. The easiest way to determine the alias of your root dataverse is to click "Advanced Search" and look at the URL. You may also choose a parent under the root. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export PARENT=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT --upload-file dataverse-complete.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root --upload-file dataverse-complete.json + +You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly created dataverse. + .. _view-dataverse: View a Dataverse @@ -59,13 +84,28 @@ Deletes the dataverse whose ID is given: ``curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/dataverses/$id`` +.. _show-contents-of-a-dataverse-api: + Show Contents of a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists all the DvObjects under dataverse ``id``. :: +|CORS| Lists all the dataverses and datasets directly under a dataverse (direct children only). You must specify the "alias" of a dataverse or its database id. If you specify your API token and have access, unpublished dataverses and datasets will be included in the listing. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export ALIAS=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ALIAS/contents + +The fully expanded example above (without environment variables) looks like this: -``curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER_URL/api/dataverses/$id/contents`` +.. 
code-block:: bash + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/contents Report the data (file) size of a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -119,6 +159,8 @@ POSTed JSON example:: ] } +.. _list-role-assignments-on-a-dataverse-api: + List Role Assignments in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -135,6 +177,7 @@ Assign a default role to a user creating a dataset in a dataverse ``id`` where ` Note: You may use "none" as the ``roleAlias``. This will prevent a user who creates a dataset from having any role on that dataset. It is not recommended for dataverses with human contributors. +.. _assign-role-on-a-dataverse-api: Assign a New Role on a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -150,6 +193,8 @@ POSTed JSON example:: "role": "curator" } +.. _revoke-role-on-a-dataverse-api: + Delete Role Assignment from a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -194,9 +239,38 @@ values are ``true`` and ``false`` (both are valid JSON expressions). :: Create a Dataset in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To create a dataset, you must create a JSON file containing all the metadata you want such as in this example file: :download:`dataset-finch1.json <../../../../scripts/search/tests/data/dataset-finch1.json>`. Then, you must decide which dataverse to create the dataset in and target that datavese with either the "alias" of the dataverse (e.g. "root" or the database id of the dataverse (e.g. "1"). The initial version state will be set to ``DRAFT``:: +A dataset is a container for files as explained in the :doc:`/user/dataset-management` section of the User Guide. + +To create a dataset, you must supply a JSON file that contains at least the following required metadata fields: + +- Title +- Author +- Description +- Subject + +As a starting point, you can download :download:`dataset-finch1.json <../../../../scripts/search/tests/data/dataset-finch1.json>` and modify it to meet your needs. (In addition to this minimal example, you can download :download:`dataset-create-new-all-default-fields.json <../../../../scripts/api/data/dataset-create-new-all-default-fields.json>` which populates all of the metadata fields that ship with Dataverse.) + +The curl command below assumes you have kept the name "dataset-finch1.json" and that this file is in your current working directory. + +Next you need to figure out the alias or database id of the "parent" dataverse into which you will be creating your new dataset. Out of the box the top level dataverse has an alias of "root" and a database id of "1" but your installation may vary. The easiest way to determine the alias of your root dataverse is to click "Advanced Search" and look at the URL. You may also choose a parent dataverse under the root dataverse. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export PARENT=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT/datasets --upload-file dataset-finch1.json + +The fully expanded example above (without the environment variables) looks like this: + +.. 
code-block:: bash - curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/dataverses/$DV_ALIAS/datasets --upload-file dataset-finch1.json + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets --upload-file dataset-finch1.json + +You should expect a 201 ("CREATED") response and JSON indicating the database ID and Persistent ID (PID such as DOI or Handle) that has been assigned to your newly created dataset. Import a Dataset into a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -246,13 +320,30 @@ The file is a DDI xml file. * This API does not handle files related to the DDI file. * A Dataverse server can import datasets with a valid PID that uses a different protocol or authority than said server is configured for. However, the server will not update the PID metadata on subsequent update and publish actions. +.. _publish-dataverse-api: Publish a Dataverse ~~~~~~~~~~~~~~~~~~~ -Publish the Dataverse pointed by ``identifier``, which can either by the dataverse alias or its numerical id. :: +In order to publish a dataverse, you must know either its "alias" (which the GUI calls an "identifier") or its database ID. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export ALIAS=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ALIAS/actions/:publish + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/actions/:publish - POST http://$SERVER/api/dataverses/$identifier/actions/:publish?key=$apiKey +You should expect a 200 ("OK") response and JSON output. Datasets -------- @@ -298,6 +389,8 @@ Get Version of a Dataset GET http://$SERVER/api/datasets/$id/versions/$versionNumber?key=$apiKey +.. _export-dataset-metadata-api: + Export Metadata of a Dataset in Various Formats ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -305,7 +398,8 @@ Export Metadata of a Dataset in Various Formats GET http://$SERVER/api/datasets/export?exporter=ddi&persistentId=$persistentId -.. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. +.. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide. + Schema.org JSON-LD ^^^^^^^^^^^^^^^^^^ @@ -382,19 +476,37 @@ You may delete some of the metadata of a dataset version by supplying a file wit For these deletes your JSON file must include an exact match of those dataset fields which you would like to delete. A sample JSON file may be downloaded here: :download:`dataset-delete-author-metadata.json <../_static/api/dataset-delete-author-metadata.json>` +.. _publish-dataset-api: + Publish a Dataset ~~~~~~~~~~~~~~~~~ -Publishes the dataset whose id is passed. If this is the first version of the dataset, its version number will be set to ``1.0``. Otherwise, the new dataset version number is determined by the most recent version number and the ``type`` parameter. 
Passing ``type=minor`` increases the minor version number (2.3 is updated to 2.4). Passing ``type=major`` increases the major version number (2.3 is updated to 3.0). Superusers can pass ``type=updatecurrent`` to update metadata without changing the version number:: +When publishing a dataset, it's good to be aware of Dataverse's versioning system, which is described in the :doc:`/user/dataset-management` section of the User Guide. - POST http://$SERVER/api/datasets/$id/actions/:publish?type=$type&key=$apiKey +If this is the first version of the dataset, its version number will be set to ``1.0``. Otherwise, the new dataset version number is determined by the most recent version number and the ``type`` parameter. Passing ``type=minor`` increases the minor version number (2.3 is updated to 2.4). Passing ``type=major`` increases the major version number (2.3 is updated to 3.0). (Superusers can pass ``type=updatecurrent`` to update metadata without changing the version number.) -.. note:: POST should be used to publish a dataset. GET is supported for backward compatibility but is deprecated and may be removed: https://github.com/IQSS/dataverse/issues/2431 +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + export MAJOR_OR_MINOR=major + + curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/datasets/:persistentId/actions/:publish?persistentId=$PERSISTENT_ID&type=$MAJOR_OR_MINOR" -.. note:: When there are no default workflows, a successful publication process will result in ``200 OK`` response. When there are workflows, it is impossible for Dataverse to know - how long they are going to take and whether they will succeed or not (recall that some stages might require human intervention). Thus, - a ``202 ACCEPTED`` is returned immediately. To know whether the publication process succeeded or not, the client code has to check the status of the dataset periodically, - or perform some push request in the post-publish workflow. +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST "https://demo.dataverse.org/api/datasets/:persistentId/actions/:publish?persistentId=doi:10.5072/FK2/J8SJZB&type=major" + +The quotes around the URL are required because there is more than one query parameter separated by an ampersand (``&``), which has special meaning to Unix shells such as Bash. Putting the ``&`` in quotes ensures that "type" is interpreted as one of the query parameters. + +You should expect JSON output and a 200 ("OK") response in most cases. If you receive a 202 ("ACCEPTED") response, this is normal for installations that have workflows configured. Workflows are described in the :doc:`/developers/workflows` section of the Developer Guide. + +.. note:: POST should be used to publish a dataset. GET is supported for backward compatibility but is deprecated and may be removed: https://github.com/IQSS/dataverse/issues/2431 Delete Dataset Draft ~~~~~~~~~~~~~~~~~~~~ @@ -447,20 +559,41 @@ Delete a Private URL from a dataset (if it exists):: DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey +.. _add-file-api: + Add a File to a Dataset ~~~~~~~~~~~~~~~~~~~~~~~ -Add a file to an existing Dataset.
Description and tags are optional:: +When adding a file to a dataset, you can optionally specify the following: + +- A description of the file. +- The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset. +- Whether or not the file is restricted. + +In the curl example below, all of the above are specified, but they are optional. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export FILENAME='data.tsv' + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: - POST http://$SERVER/api/datasets/$id/add?key=$apiKey +.. code-block:: bash -A more detailed "add" example using curl:: + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" - curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@data.tsv' -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"true"}' "https://example.dataverse.edu/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" +You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file. Please note that it's possible to "trick" Dataverse into giving a file a content type (MIME type) of your choosing. For example, you can make a text file be treated like a video file with ``-F 'file=@README.txt;type=video/mpeg4'``. If Dataverse does not properly detect a file type, specifying the content type via API like this is a potential workaround. -Example python code to add a file. This may be run by changing these parameters in the sample code: +The curl syntax above for uploading a file is tricky, and a Python version is provided below. (Please note that it depends on libraries such as "requests" that you may need to install, but this task is out of scope for this guide.) Here are some parameters you can set in the script: * ``dataverse_server`` - e.g. https://demo.dataverse.org * ``api_key`` - See the top of this document for a description @@ -640,7 +773,7 @@ Dataset Metrics Please note that these dataset level metrics are only available if support for Make Data Count has been enabled in your installation of Dataverse. See the :ref:`Dataset Metrics ` in the :doc:`/user/dataset-management` section of the User Guide and the :doc:`/admin/make-data-count` section of the Admin Guide for details. -Please note that in the curl examples, Bash environment variables are used with the idea that you can set a few environment variables and copy and paste the examples as is. For example, "$DV_BASE_URL" could become "https://demo.dataverse.org" by issuing the following ``export`` command from Bash: +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below.
``export DV_BASE_URL=https://demo.dataverse.org`` diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst index 946cf85f3aa..d77a320446b 100644 --- a/doc/sphinx-guides/source/installation/oauth2.rst +++ b/doc/sphinx-guides/source/installation/oauth2.rst @@ -80,6 +80,8 @@ Converting Local Users to OAuth Once you have enabled at least one OAuth provider, existing users might want to change their login method from local to OAuth to avoid having a Dataverse-specific password. This is documented from the end user perspective in the :doc:`/user/account` section of the User Guide. Users will be prompted to create a new account but can choose to convert an existing local account after confirming their password. +.. _converting-oauth-users-to-local: + Converting OAuth Users to Local ------------------------------- diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst index 46ce52b7508..813c46203c3 100644 --- a/doc/sphinx-guides/source/installation/shibboleth.rst +++ b/doc/sphinx-guides/source/installation/shibboleth.rst @@ -390,6 +390,8 @@ If you are running in "remote and local" mode and have existing local users that - If the email address associated with your local account matches the email address asserted by the Identity Provider (IdP), you will be prompted for the password of your local account and asked to confirm the conversion of your account. You're done! Browse around to ensure you see all the data you expect to see. Permissions have been preserved. - If the email address asserted by the Identity Provider (IdP) does not match the email address of any local user, you will be prompted to create a new account. If you were expecting account conversion, you should decline creating a new Shibboleth account, log back in to your local account, and let Support know the email on file for your local account. Support may ask you to change your email address for your local account to the one that is being asserted by the Identity Provider. Someone with access to the Glassfish logs will see this email address there. +.. _converting-shibboleth-users-to-local: + Converting Shibboleth Users to Local ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index ffdbecdc4f6..8b6ccfff8a3 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -164,5 +164,33 @@ Notifications will only be emailed one time even if you haven't read the notific API Token --------- -#. To create your API token, click on your name in the header on the right hand side and then click on API Token. -#. In this tab, you can create your API Token for the first time as well as recreate it if you need a new API Token or if your API Token becomes compromised. +What APIs Are and Why They Are Useful +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +API stands for "Application Programming Interface" and Dataverse APIs allow you to take advantage of integrations with other software that may have been set up by admins of your installation of Dataverse. See the :doc:`/admin/integrations` section of the Admin Guide and the :doc:`/installation/external-tools` section of the Installation Guide for examples of software that is commonly integrated with Dataverse. 
+ +Additionally, if you are willing to write a little code (or find someone to write it for you), APIs provide a way to automate parts of your workflow. See the :doc:`/api/getting-started` section of the API Guide for details. + +How Your API Token Is Like a Password +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In many cases, such as when depositing data, an API Token is required to interact with Dataverse APIs. The word "token" indicates a series of letters and numbers such as ``c6527048-5bdc-48b0-a1d5-ed1b62c8113b``. Anyone who has your API Token can add and delete data as you so you should treat it with the same care as a password. + +How to Create Your API Token +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To create your API token, click on your name in the upper right and then click "API Token". In this tab, click "Create Token". + +How to Recreate Your API Token +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If your API Token becomes compromised or has expired, click on your name in the upper right and click "API Token". In this tab, click "Recreate Token". + +Additional Information about API Tokens and Dataverse APIs +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Dataverse APIs are documented in the :doc:`/api/index` but the following sections may be of particular interest: + +- :doc:`/api/getting-started` +- :doc:`/api/auth` +- :doc:`/api/faq` From b7a741b23e13ce0deb55a7b004c169a7e404da5a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 20 Aug 2019 16:37:05 -0400 Subject: [PATCH 22/94] clean up #6086 --- doc/sphinx-guides/source/admin/apis.rst | 0 doc/sphinx-guides/source/api/index.rst | 1 - 2 files changed, 1 deletion(-) delete mode 100644 doc/sphinx-guides/source/admin/apis.rst diff --git a/doc/sphinx-guides/source/admin/apis.rst b/doc/sphinx-guides/source/admin/apis.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index e70c369eeeb..0c0c4c186a3 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -10,7 +10,6 @@ API Guide .. toctree:: - new intro getting-started auth From aa8db98abfd8f1a2760740508ccff0aeeb382938 Mon Sep 17 00:00:00 2001 From: j-n-c Date: Wed, 21 Aug 2019 13:00:15 +0100 Subject: [PATCH 23/94] #5665: formatting changes based on feedback from pdurbin --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ .../source/installation/config.rst | 21 +++++++++++++++---- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c5bee4e250c..20c3fef1247 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1079,6 +1079,8 @@ List all the authentication providers in the system (both enabled and disabled): GET http://$SERVER/api/admin/authenticationProviders +.. _native_api_add_auth_provider: + Add Authentication Provider ~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 759001eb6a3..71dbcd65da9 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -179,18 +179,23 @@ Auth Modes: Local vs. Remote vs. Both There are three valid configurations or modes for authenticating users to Dataverse: -- Local only (also known as "builtin" or "Username/Email"). -- Both local and remote (Shibboleth and/or OAuth). 
-- Remote (Shibboleth and/or OAuth) only. +Local Only Auth ++++++++++++++++ Out of the box, Dataverse is configured in "local only" mode. The "dataverseAdmin" superuser account mentioned in the :doc:`/installation/installation-main` section is an example of a local account. Internally, these accounts are called "builtin" because they are built in to the Dataverse application itself. +Both Local and Remote Auth +++++++++++++++++++++++++++ + The ``authenticationproviderrow`` database table controls which "authentication providers" are available within Dataverse. Out of the box, a single row with an id of "builtin" will be present. For each user in Dataverse, the ``authenticateduserlookup`` table will have a value under ``authenticationproviderid`` that matches this id. For example, the default "dataverseAdmin" user will have the value "builtin" under ``authenticationproviderid``. Why is this important? Users are tied to a specific authentication provider but conversion mechanisms are available to switch a user from one authentication provider to the other. As explained in the :doc:`/user/account` section of the User Guide, a graphical workflow is provided for end users to convert from the "builtin" authentication provider to a remote provider. Conversion from a remote authentication provider to the builtin provider can be performed by a sysadmin with access to the "admin" API. See the :doc:`/api/native-api` section of the API Guide for how to list users and authentication providers as JSON. -Adding and enabling a second authentication provider (``Add Authentication Provider`` and ``Enable or Disable an Authentication Provider`` from :doc:`/api/native-api`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the ``:DefaultAuthProvider`` configuration option. Further customization can be achieved by setting ``:AllowSignUp`` (in :doc:`config`) to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint or scramble (or remove) the ``BuiltinUsers.KEY`` database setting per the :doc:`config` section. +Adding and enabling a second authentication provider (:ref:`native_api_add_auth_provider` and :ref:`api_toggle_auth_provider`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the :ref:`conf_default_auth_provider` configuration option. Further customization can be achieved by setting :ref:`:AllowSignUp` to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint (:ref:`conf_blocked_api_endpoints`) or scramble (or remove) the ``BuiltinUsers.KEY`` database setting (:ref:`conf_built_in_users_key`) per the :doc:`config` section. To configure Shibboleth see the :doc:`shibboleth` section and to configure OAuth see the :doc:`oauth2` section. +Remote Only Auth +++++++++++++++++ + As for the "Remote only" authentication mode, it means that: - Shibboleth or OAuth has been enabled. 
@@ -1043,6 +1048,8 @@ Out of the box, all API endpoints are completely open, as mentioned in the secti ``curl -X PUT -d localhost-only http://localhost:8080/api/admin/settings/:BlockedApiPolicy`` +.. _conf_blocked_api_endpoints: + :BlockedApiEndpoints ++++++++++++++++++++ @@ -1059,6 +1066,8 @@ Used in conjunction with the ``:BlockedApiPolicy`` being set to ``unblock-key``. ``curl -X PUT -d s3kretKey http://localhost:8080/api/admin/settings/:BlockedApiKey`` +.. _conf_built_in_users_key: + BuiltinUsers.KEY ++++++++++++++++ @@ -1471,6 +1480,8 @@ Allow for migration of non-conformant data (especially dates) from DVN 3.x to Da The duration in minutes before "Confirm Email" URLs expire. The default is 1440 minutes (24 hours). See also the :doc:`/admin/user-administration` section of our Admin Guide. +.. _conf_default_auth_provider: + :DefaultAuthProvider ++++++++++++++++++++ @@ -1486,6 +1497,8 @@ Here is an example of setting the default auth provider back to ``builtin``: ``curl -X PUT -d builtin http://localhost:8080/api/admin/settings/:DefaultAuthProvider`` +.. _:AllowSignUp: + :AllowSignUp ++++++++++++ From d3a5b2fff2cfdd4aff799ef44ec8bfec0a8d8065 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 21 Aug 2019 09:56:16 -0400 Subject: [PATCH 24/94] link to "What are the allowed search fields?" issue #2558 --- doc/sphinx-guides/source/api/search.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index 8ea51b24ce4..ff2da576577 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -24,7 +24,7 @@ Parameters =============== ======= =========== Name Type Description =============== ======= =========== -q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data +q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data . For a list of fields to search, please see https://github.com/IQSS/dataverse/issues/2558 (for now). type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset subtree string The identifier of the dataverse to which the search should be narrowed. The subtree of this dataverse and all its children will be searched. Multiple "subtree" parameters can be used to include multiple Dataverses. For example, https://demo.dataverse.org/api/search?q=data&subtree=birds&subtree=cats . sort string The sort field. Supported values include "name" and "date". See example under "order". From 5d264d87f7291b530e4c152b48a8f151c5e3aabe Mon Sep 17 00:00:00 2001 From: j-n-c Date: Wed, 21 Aug 2019 15:15:09 +0100 Subject: [PATCH 25/94] #5665: Based in pdurbin feedback, changed the Docs Installation Guide > Configuration to use hyphens in RST cross references. 
Updated Docs Developer Guide > Documentation to include this recommendation --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- .../source/developers/documentation.rst | 2 ++ doc/sphinx-guides/source/installation/config.rst | 12 ++++++------ 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 20c3fef1247..330e071934f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1079,7 +1079,7 @@ List all the authentication providers in the system (both enabled and disabled): GET http://$SERVER/api/admin/authenticationProviders -.. _native_api_add_auth_provider: +.. _native-api-add-auth-provider: Add Authentication Provider ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1096,7 +1096,7 @@ Show data about an authentication provider:: GET http://$SERVER/api/admin/authenticationProviders/$id -.. _api_toggle_auth_provider: +.. _api-toggle-auth-provider: Enable or Disable an Authentication Provider ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index d7e417d189d..3a0806b091d 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -77,6 +77,8 @@ To edit the existing documentation: - Go to ~/dataverse/doc/sphinx-guides/source directory inside your clone. There, you will find the .rst files that correspond to the guides in the dataverse page (http://guides.dataverse.org/en/latest/). - Using your preferred text editor, open and edit the necessary files, or create new ones. +**NOTE:** When adding ReStructured Text (RST) `cross references `_, use the hyphen character (``-``) as the word separator for the cross reference label. For example, ``my-reference-label`` would be the preferred label for a cross reference as oposed to, for example, ``my_refrence_label``. + Once you are done, open a terminal and change directories to ~/dataverse/doc/sphinx-guides . Then, run the following commands: - ``make clean`` diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 71dbcd65da9..7305a026e47 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -189,7 +189,7 @@ Both Local and Remote Auth The ``authenticationproviderrow`` database table controls which "authentication providers" are available within Dataverse. Out of the box, a single row with an id of "builtin" will be present. For each user in Dataverse, the ``authenticateduserlookup`` table will have a value under ``authenticationproviderid`` that matches this id. For example, the default "dataverseAdmin" user will have the value "builtin" under ``authenticationproviderid``. Why is this important? Users are tied to a specific authentication provider but conversion mechanisms are available to switch a user from one authentication provider to the other. As explained in the :doc:`/user/account` section of the User Guide, a graphical workflow is provided for end users to convert from the "builtin" authentication provider to a remote provider. Conversion from a remote authentication provider to the builtin provider can be performed by a sysadmin with access to the "admin" API. See the :doc:`/api/native-api` section of the API Guide for how to list users and authentication providers as JSON. 
-Adding and enabling a second authentication provider (:ref:`native_api_add_auth_provider` and :ref:`api_toggle_auth_provider`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the :ref:`conf_default_auth_provider` configuration option. Further customization can be achieved by setting :ref:`:AllowSignUp` to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint (:ref:`conf_blocked_api_endpoints`) or scramble (or remove) the ``BuiltinUsers.KEY`` database setting (:ref:`conf_built_in_users_key`) per the :doc:`config` section. +Adding and enabling a second authentication provider (:ref:`native-api-add-auth-provider` and :ref:`api-toggle-auth-provider`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the :ref:`conf-default-auth-provider` configuration option. Further customization can be achieved by setting :ref:`conf-allow-signup` to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint (:ref:`conf-blocked-api-endpoints`) or scramble (or remove) the ``BuiltinUsers.KEY`` database setting (:ref:`conf-built-in-users-key`) per the :doc:`config` section. To configure Shibboleth see the :doc:`shibboleth` section and to configure OAuth see the :doc:`oauth2` section. @@ -201,7 +201,7 @@ As for the "Remote only" authentication mode, it means that: - Shibboleth or OAuth has been enabled. - ``:AllowSignUp`` is set to "false" to prevent users from creating local accounts via the web interface. - ``:DefaultAuthProvider`` has been set to use the desired authentication provider -- The "builtin" authentication provider has been disabled (:ref:`api_toggle_auth_provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. +- The "builtin" authentication provider has been disabled (:ref:`api-toggle-auth-provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. 
Note that longer term, the plan is to permit multiple login options to the same Dataverse account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. File Storage: Local Filesystem vs. Swift vs. S3 ----------------------------------------------- @@ -1048,7 +1048,7 @@ Out of the box, all API endpoints are completely open, as mentioned in the secti ``curl -X PUT -d localhost-only http://localhost:8080/api/admin/settings/:BlockedApiPolicy`` -.. _conf_blocked_api_endpoints: +.. _conf-blocked-api-endpoints: :BlockedApiEndpoints ++++++++++++++++++++ @@ -1066,7 +1066,7 @@ Used in conjunction with the ``:BlockedApiPolicy`` being set to ``unblock-key``. ``curl -X PUT -d s3kretKey http://localhost:8080/api/admin/settings/:BlockedApiKey`` -.. _conf_built_in_users_key: +.. _conf-built-in-users-key: BuiltinUsers.KEY ++++++++++++++++ @@ -1480,7 +1480,7 @@ Allow for migration of non-conformant data (especially dates) from DVN 3.x to Da The duration in minutes before "Confirm Email" URLs expire. The default is 1440 minutes (24 hours). See also the :doc:`/admin/user-administration` section of our Admin Guide. -.. _conf_default_auth_provider: +.. _conf-default-auth-provider: :DefaultAuthProvider ++++++++++++++++++++ @@ -1497,7 +1497,7 @@ Here is an example of setting the default auth provider back to ``builtin``: ``curl -X PUT -d builtin http://localhost:8080/api/admin/settings/:DefaultAuthProvider`` -.. _:AllowSignUp: +.. _conf-allow-signup: :AllowSignUp ++++++++++++ From 8ebef62d9fd97cf0a09940e8ca48058f1c4f69cb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 21 Aug 2019 10:27:58 -0400 Subject: [PATCH 26/94] typos --- doc/sphinx-guides/source/developers/documentation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index 3a0806b091d..65e790050a2 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -77,7 +77,7 @@ To edit the existing documentation: - Go to ~/dataverse/doc/sphinx-guides/source directory inside your clone. There, you will find the .rst files that correspond to the guides in the dataverse page (http://guides.dataverse.org/en/latest/). - Using your preferred text editor, open and edit the necessary files, or create new ones. -**NOTE:** When adding ReStructured Text (RST) `cross references `_, use the hyphen character (``-``) as the word separator for the cross reference label. For example, ``my-reference-label`` would be the preferred label for a cross reference as oposed to, for example, ``my_refrence_label``. +**NOTE:** When adding ReStructured Text (RST) `cross references `_, use the hyphen character (``-``) as the word separator for the cross reference label. For example, ``my-reference-label`` would be the preferred label for a cross reference as opposed to, for example, ``my_reference_label``. Once you are done, open a terminal and change directories to ~/dataverse/doc/sphinx-guides . 
Then, run the following commands: From ac75a31a278b36346152bf743a53ac054fc135d4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 21 Aug 2019 12:26:23 -0400 Subject: [PATCH 27/94] fix API doc on listing and defining metadata blocks #3624 --- .../_static/api/define-metadatablocks.json | 8 ++++ doc/sphinx-guides/source/api/native-api.rst | 42 +++++++++++++++++-- 2 files changed, 46 insertions(+), 4 deletions(-) create mode 100644 doc/sphinx-guides/source/_static/api/define-metadatablocks.json diff --git a/doc/sphinx-guides/source/_static/api/define-metadatablocks.json b/doc/sphinx-guides/source/_static/api/define-metadatablocks.json new file mode 100644 index 00000000000..7d4b13cad40 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/define-metadatablocks.json @@ -0,0 +1,8 @@ +[ + "citation", + "geospatial", + "socialscience", + "astrophysics", + "biomedical", + "journal" +] diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 382c84191ee..886eb65c170 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -205,16 +205,50 @@ Delete the assignment whose id is ``$id``:: List Metadata Blocks Defined on a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Get the metadata blocks defined on the passed dataverse:: +|CORS| Get the metadata blocks defined on a dataverse which determine which field are available to authors when they create and edit datasets within that dataverse. This feature is described under "General Information" in the :doc:`/user/dataverse-management` section of the User Guide. - GET http://$SERVER/api/dataverses/$id/metadatablocks?key=$apiKey +Please note that an API token is only required if the dataverse has not been published. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export ALIAS=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ALIAS/metadatablocks + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks Define Metadata Blocks for a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Sets the metadata blocks of the dataverse. Makes the dataverse a metadatablock root. The query body is a JSON array with a list of metadatablocks identifiers (either id or name), such as "journal" and "geospatial" in the example below. Requires "EditDataverse" permission. In this example the "root" dataverse is being modified but you can substitute any dataverse alias: +You can define the metadata blocks available to authors within a dataverse. + +The metadata blocks that are available with a default installation of Dataverse are in :download:`define-metadatablocks.json <../_static/api/define-metadatablocks.json>` (also shown below) and you should download this file and edit it to meet your needs. Please note that the "citation" metadata block is required. You must have "EditDataverse" permission on the dataverse. + +.. literalinclude:: ../_static/api/define-metadatablocks.json + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export ALIAS=root + export SERVER_URL=https://demo.dataverse.org + + curl -H X-Dataverse-key:$API_TOKEN -X POST -H \"Content-type:application/json\" --upload-file define-metadatablocks.json $SERVER_URL/api/dataverses/$ALIAS/metadatablocks + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash -``curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" -d "[\"journal\",\"geospatial\"]" http://localhost:8080/api/dataverses/:root/metadatablocks`` + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json https://demo.dataverse.org/api/dataverses/root/metadatablocks Determine if a Dataverse Inherits Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From cdaa4472feacf48cdd8ec7770ba3b4eeb47cf82b Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Wed, 21 Aug 2019 22:22:01 -0400 Subject: [PATCH 28/94] Update 4.16-release-notes.md --- doc/release-notes/4.16-release-notes.md | 80 ++++++++++++++++--------- 1 file changed, 51 insertions(+), 29 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index f47927c399e..6a9890328bf 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -1,21 +1,42 @@ -This release brings new features, enhancements, and bug fixes to Dataverse. The release highlights include: +# Dataverse 4.16 -**Metrics Redesign** -The metrics view at both the Dataset and File level has been redesigned. The main driver of this redesign has been the expanded metrics (citations and views) provided through an integration with Make Data Count, but installations that do not adopt Make Data Count will also be able to take advantage of the new metrics panel. +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. -**HTML Codebook Export** -Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase resusability of the datasets in Dataverse. +## Release Highlights -If you're running a Dataverse Installation, you should be aware of these things: +### Metrics Redesign + +The metrics view at both the Dataset and File level has been redesigned. The main driver of this redesign has been the expanded metrics (citations and views) provided through an integration with Make Data Count, but installations that do not adopt Make Data Count will also be able to take advantage of the new metrics panel. + +### HTML Codebook Export + +Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase resusability of the datasets in Dataverse. + +### Harvesting Improvements + +The Harvesting code will now better handle problematic records during incremental harvests. Fixing this will mean not only fewer manual interventions by installation administrators to keep harvesting running, but it will also mean researchers can more easily find and access data that is important to their research. 
+ +## Major Use Cases + +Newly-supported use cases in this release include: + +- As a researcher, I can view the works that have cited a dataset. +- As a researcher, I can view the downloads and views for a dataset, based on the Make Data Count standard. +- As a researcher, I can export an HTML codebook for a dataset. +- As a researcher, I can expect harvested datasets to be made available more regularly. +- As a researcher, I'll encounter fewer locks as I go through the publishing process. +- As an installation administrator, I no longer need to destroy a PID in another system after destroying a dataset in Dataverse. + +## Notes for Dataverse Installation Administrators **Run ReExportall** -We made changes to the citation block in this release that will require installations to run ReExportall as part of the upgrade process. +We made changes to the citation block in this release that will require installations to run ReExportall as part of the upgrade process. We've included this in the detailed instructions below. **Custom Analytics Code Changes** -You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). +You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). **Destroy Updates** -Destroying Datasets in Dataverse will now unregister/delete the PID with the PID provider. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. +Destroying Datasets in Dataverse will now unregister/delete the PID with the PID provider. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. **Deleting Notifications** In making the fix for #5687 we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose: @@ -23,38 +44,39 @@ In making the fix for #5687 we discovered that notifications created prior to 20 `delete from usernotification where date_part('year', senddate) < 2018;` **Lock Improvements** -In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. - -**Harvesting Improvements** -We've updated the Harvesting code to better handle problematic records during incremental harvests. +In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. For the complete list of code changes in this release, see the 4.16 milestone in Github. 
-For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. +For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. -## Installation: +## Installation If this is a new installation, please see our Installation Guide. -## Upgrade: +## Upgrade -1. Undeploy the previous version. +1. Undeploy the previous version. - - <glassfish install path>/glassfish4/bin/asadmin list-applications - - <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse +- <glassfish install path>/glassfish4/bin/asadmin list-applications +- <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse -2. Stop glassfish and remove the generated directory, start - - service glassfish stop - - remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated - - service glassfish start +1. Stop glassfish and remove the generated directory, start -3. Deploy this version. - - <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.16.war +- service glassfish stop +- remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated +- service glassfish start -4. Restart glassfish +1. Deploy this version. + +- <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.16.war + +1. Restart glassfish + +1. Update Citation Metadata Block -5. Update Citation Metadata Block `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` -6. Run ReExportall to update the citations -http://guides.dataverse.org/en/4.16/admin/metadataexport.html?highlight=export#batch-exports-through-the-api +1. Run ReExportall to update the citations + + From 9d12d4c4daa878975eddc7bc424025fc84e6365e Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Wed, 21 Aug 2019 22:28:35 -0400 Subject: [PATCH 29/94] Update 4.16-release-notes.md --- doc/release-notes/4.16-release-notes.md | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index 6a9890328bf..da427bf5cc5 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -10,7 +10,7 @@ The metrics view at both the Dataset and File level has been redesigned. The mai ### HTML Codebook Export -Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase resusability of the datasets in Dataverse. +Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase reusability of the datasets in Dataverse. ### Harvesting Improvements @@ -29,21 +29,26 @@ Newly-supported use cases in this release include: ## Notes for Dataverse Installation Administrators -**Run ReExportall** +### Run ReExportall + We made changes to the citation block in this release that will require installations to run ReExportall as part of the upgrade process. We've included this in the detailed instructions below. 
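As an illustrative sketch (see the metadata export documentation linked from the upgrade steps for the authoritative command), the batch re-export can be kicked off with a single admin API call, assuming the admin API is reachable on localhost:

`curl http://localhost:8080/api/admin/metadata/reExportAll`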
-**Custom Analytics Code Changes** +### Custom Analytics Code Changes + You should update your custom analytics code to include CDATA sections, inside the `script` tags, around the javascript code. We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to fix a bug that broke the rendering of the 403 and 500 custom error pgs (#5967). -**Destroy Updates** +### Destroy Updates + Destroying Datasets in Dataverse will now unregister/delete the PID with the PID provider. This eliminates the need for an extra step to "clean up" a PID registration after destroying a Dataset. -**Deleting Notifications** +### Deleting Notifications + In making the fix for #5687 we discovered that notifications created prior to 2018 may have been invalidated. With this release we advise that these older notifications are deleted from the database. The following query can be used for this purpose: `delete from usernotification where date_part('year', senddate) < 2018;` -**Lock Improvements** +### Lock Improvements + In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. For the complete list of code changes in this release, see the 4.16 milestone in Github. From ecbf0580d2ec414e55d2a15cc823027a21ec7441 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Wed, 21 Aug 2019 22:31:27 -0400 Subject: [PATCH 30/94] numbering is hard --- doc/release-notes/4.16-release-notes.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index da427bf5cc5..cdda138d44d 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -66,22 +66,22 @@ If this is a new installation, please see our From fe5e9db25bb749cc785627519905a0098a415337 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Wed, 21 Aug 2019 22:32:56 -0400 Subject: [PATCH 31/94] indentation, also hard --- doc/release-notes/4.16-release-notes.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index cdda138d44d..943e39c01d2 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -74,14 +74,14 @@ If this is a new installation, please see our + From 117aa015892b9dc85dabf895fa5e644e48752426 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Wed, 21 Aug 2019 22:54:54 -0400 Subject: [PATCH 32/94] local previewer is showing me something different than github, experimenting --- doc/release-notes/4.16-release-notes.md | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index 943e39c01d2..ba6fb3e9ff8 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -73,15 +73,14 @@ If this is a new installation, please see our + From f2cbb568dabeff50c6cb95e534fcecdabca007e1 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch Date: Thu, 22 Aug 2019 11:06:04 -0400 Subject: [PATCH 33/94] Allow Cors --- .../source/installation/config.rst | 8 ++ .../iq/dataverse/api/AbstractApiBean.java | 5 -- 
.../edu/harvard/iq/dataverse/api/Access.java | 10 --- .../iq/dataverse/api/ApiBlockingFilter.java | 9 ++- .../harvard/iq/dataverse/api/Datasets.java | 28 +++---- .../harvard/iq/dataverse/api/Dataverses.java | 12 +-- .../edu/harvard/iq/dataverse/api/Info.java | 6 +- .../edu/harvard/iq/dataverse/api/Meta.java | 6 -- .../iq/dataverse/api/MetadataBlocks.java | 4 +- .../edu/harvard/iq/dataverse/api/Metrics.java | 76 +++++++++---------- .../edu/harvard/iq/dataverse/api/Search.java | 5 +- .../settings/SettingsServiceBean.java | 9 ++- .../iq/dataverse/api/AbstractApiBeanTest.java | 6 -- 13 files changed, 86 insertions(+), 98 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 12e94e178a1..1b2f42eee1d 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1757,3 +1757,11 @@ This can be helpful in situations where multiple organizations are sharing one D or ``curl -X PUT -d '*' http://localhost:8080/api/admin/settings/:InheritParentRoleAssignments`` +:AllowCors +++++++++++ + +Allows Cross-Origin Resource sharing(CORS). By default this setting is absent and Dataverse assumes it to be true. + +If you don’t want to allow CORS for your installation, set: + +``curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:AllowCors`` diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 41602fe412e..b8616809adc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -737,11 +737,6 @@ protected static Response error( Status sts, String msg ) { .add( "message", msg ).build() ).type(MediaType.APPLICATION_JSON_TYPE).build(); } - - protected Response allowCors( Response r ) { - r.getHeaders().add("Access-Control-Allow-Origin", "*"); - return r; - } } class LazyRef { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 6f957afe2c5..a76d65934ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -367,10 +367,6 @@ public DownloadInstance datafile(@PathParam("fileId") String fileId, @QueryParam // TODO: throw new ServiceUnavailableException(); } } - /* - * Provide "Access-Control-Allow-Origin" header: - */ - response.setHeader("Access-Control-Allow-Origin", "*"); /* * Provide some browser-friendly headers: (?) 
@@ -447,8 +443,6 @@ public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId, @Q throw new ServiceUnavailableException(); } - response.setHeader("Access-Control-Allow-Origin", "*"); - return retValue; } @@ -477,8 +471,6 @@ public String dataVariableMetadataDDI(@PathParam("varId") Long varId, @QueryPara retValue = outStream.toString(); - response.setHeader("Access-Control-Allow-Origin", "*"); - return retValue; } @@ -514,8 +506,6 @@ public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") logger.fine("Preprocessed data for tabular file "+fileId); } - response.setHeader("Access-Control-Allow-Origin", "*"); - return downloadInstance; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java index 6fc0e488fe3..761a6c8ef77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java @@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; + /** * A web filter to block API administration calls. * @author michael @@ -159,9 +160,11 @@ public void doFilter(ServletRequest sr, ServletResponse sr1, FilterChain fc) thr } } try { - ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Origin", "*"); - ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Methods","PUT"); - ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Headers", "Content-Type, X-Dataverse-Key"); + if (settingsSvc.isTrueForKey(SettingsServiceBean.Key.AllowCors, true )) { + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Origin", "*"); + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Methods", "PUT, GET, POST, DELETE, OPTIONS"); + ((HttpServletResponse) sr1).addHeader("Access-Control-Allow-Headers", "Content-Type, X-Dataverse-Key"); + } fc.doFilter(sr, sr1); } catch ( ServletException se ) { logger.log(Level.WARNING, "Error processing " + requestURI +": " + se.getMessage(), se); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 9378b127080..bdca1c6b38b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -235,7 +235,7 @@ public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo, MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); mdcLogService.logEntry(entry); - return allowCors(ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null))); + return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null)); }); } @@ -265,10 +265,10 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset); mdcLogService.logEntry(entry); - return allowCors(Response.ok() + return Response.ok() .entity(is) .type(mediaType). 
- build()); + build(); } catch (Exception wr) { return error(Response.Status.FORBIDDEN, "Export Failed"); } @@ -434,37 +434,37 @@ public Response useDefaultCitationDate( @PathParam("id") String id) { @GET @Path("{id}/versions") public Response listVersions( @PathParam("id") String id ) { - return allowCors(response( req -> + return response( req -> ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) ) .stream() .map( d -> json(d) ) - .collect(toJsonArray())))); + .collect(toJsonArray()))); } @GET @Path("{id}/versions/{versionId}") public Response getVersion( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return allowCors(response( req -> { + return response( req -> { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(json(dsv)); - })); + }); } @GET @Path("{id}/versions/{versionId}/files") public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return allowCors(response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())))); + return response( req -> ok( jsonFileMetadatas( + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas()))); } @GET @Path("{id}/versions/{versionId}/metadata") public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return allowCors(response( req -> ok( + return response( req -> ok( jsonByBlocks( getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers ) - .getDatasetFields())))); + .getDatasetFields()))); } @GET @@ -475,7 +475,7 @@ public Response getVersionMetadataBlock( @PathParam("id") String datasetId, @Context UriInfo uriInfo, @Context HttpHeaders headers ) { - return allowCors(response( req -> { + return response( req -> { DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers ); Map> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields()); @@ -485,7 +485,7 @@ public Response getVersionMetadataBlock( @PathParam("id") String datasetId, } } return notFound("metadata block named " + blockName + " not found"); - })); + }); } @GET @@ -1887,7 +1887,7 @@ public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @Path jsonObjectBuilder.add("viewsUnique", viewsUnique); jsonObjectBuilder.add("downloadsTotal", downloadsTotal); jsonObjectBuilder.add("downloadsUnique", downloadsUnique); - return allowCors(ok(jsonObjectBuilder)); + return ok(jsonObjectBuilder); } catch (WrappedResponse wr) { return wr.getResponse(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 8ac90f00016..bd5f17dcd75 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -395,10 +395,10 @@ private Dataset parseDataset(String datasetJson) throws WrappedResponse { @GET @Path("{identifier}") public Response viewDataverse(@PathParam("identifier") String idtf) { - return allowCors(response(req -> ok( + return 
response(req -> ok( json(execCommand(new GetDataverseCommand(req, findDataverseOrDie(idtf))), settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false) - )))); + ))); } @DELETE @@ -429,7 +429,7 @@ public Response listMetadataBlocks(@PathParam("identifier") String dvIdtf) { for (MetadataBlock mdb : blocks) { arr.add(brief.json(mdb)); } - return allowCors(ok(arr)); + return ok(arr); } catch (WrappedResponse we) { return we.getResponse(); } @@ -521,7 +521,7 @@ public Response listFacets(@PathParam("identifier") String dvIdtf) { for (DataverseFacet f : execCommand(new ListFacetsCommand(r, dataverse))) { fs.add(f.getDatasetFieldType().getName()); } - return allowCors(ok(fs)); + return ok(fs); } catch (WrappedResponse e) { return e.getResponse(); } @@ -588,11 +588,11 @@ public JsonObjectBuilder visit(DataFile df) { } }; - return allowCors(response(req -> ok( + return response(req -> ok( execCommand(new ListDataverseContentCommand(req, findDataverseOrDie(dvIdtf))) .stream() .map(dvo -> (JsonObjectBuilder) dvo.accept(ser)) - .collect(toJsonArray())) + .collect(toJsonArray()) )); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index a2f9d7a5217..4fe5cba5b9f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -37,8 +37,8 @@ public Response getInfo() { String version = comps[0].trim(); JsonValue build = comps.length > 1 ? Json.createArrayBuilder().add(comps[1].trim()).build().get(0) : JsonValue.NULL; - return allowCors(response( req -> ok( Json.createObjectBuilder().add("version", version) - .add("build", build)))); + return response( req -> ok( Json.createObjectBuilder().add("version", version) + .add("build", build))); } @GET @@ -50,6 +50,6 @@ public Response getServer() { @GET @Path("apiTermsOfUse") public Response getTermsOfUse() { - return allowCors(response( req -> ok(systemConfig.getApiTermsOfUse()))); + return response( req -> ok(systemConfig.getApiTermsOfUse())); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java index 853340373d9..99e2c773b30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java @@ -94,8 +94,6 @@ public String variable(@PathParam("varId") Long varId, @QueryParam("fileMetadata retValue = outStream.toString(); - response.setHeader("Access-Control-Allow-Origin", "*"); - return retValue; } @@ -142,8 +140,6 @@ public String datafile(@PathParam("fileId") Long fileId, @QueryParam("fileMetada throw new ServiceUnavailableException(); } - response.setHeader("Access-Control-Allow-Origin", "*"); - return retValue; } @@ -178,8 +174,6 @@ public String dataset(@PathParam("datasetId") Long datasetId, @QueryParam("exclu throw new ServiceUnavailableException(); } - response.setHeader("Access-Control-Allow-Origin", "*"); - return retValue; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index 8e1565c5738..b3e1dad13af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -20,7 +20,7 @@ public class MetadataBlocks extends AbstractApiBean { @GET public Response list() { - return allowCors(ok(metadataBlockSvc.listMetadataBlocks().stream().map(brief::json).collect(toJsonArray()))); + return 
ok(metadataBlockSvc.listMetadataBlocks().stream().map(brief::json).collect(toJsonArray())); } @Path("{identifier}") @@ -28,7 +28,7 @@ public Response list() { public Response getBlock( @PathParam("identifier") String idtf ) { MetadataBlock b = findMetadataBlock(idtf); - return allowCors( (b != null ) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'")); + return (b != null ) ? ok(json(b)) : notFound("Can't find metadata block '" + idtf + "'"); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java index 2694252091f..5297a84c3d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java @@ -48,7 +48,7 @@ public Response getDataversesToMonth(@Context UriInfo uriInfo, @PathParam("yyyym try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "dataversesToMonth"; @@ -64,12 +64,12 @@ public Response getDataversesToMonth(@Context UriInfo uriInfo, @PathParam("yyyym metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); //TODO: Eventually the catch in each endpoint should be more specific // and more general errors should be logged. } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -79,13 +79,13 @@ public Response getDataversesPastDays(@Context UriInfo uriInfo, @PathParam("days try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "dataversesPastDays"; if(days < 1) { - return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); + return error(BAD_REQUEST, "Invalid parameter for number of days."); } try { String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); @@ -97,10 +97,10 @@ public Response getDataversesPastDays(@Context UriInfo uriInfo, @PathParam("days metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -110,7 +110,7 @@ public Response getDataversesByCategory(@Context UriInfo uriInfo) { try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "dataversesByCategory"; @@ -124,9 +124,9 @@ public Response getDataversesByCategory(@Context UriInfo uriInfo) { metricsSvc.save(new Metric(metricName, null, null, jsonArrayString)); } - return allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); + return ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString)); } catch (Exception ex) { - return 
allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -136,7 +136,7 @@ public Response getDataversesBySubject(@Context UriInfo uriInfo) { try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "dataversesBySubject"; @@ -150,9 +150,9 @@ public Response getDataversesBySubject(@Context UriInfo uriInfo) { metricsSvc.save(new Metric(metricName, null, null, jsonArrayString)); } - return allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); + return ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -177,7 +177,7 @@ public Response getDatasetsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm" try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "datasetsToMonth"; @@ -194,10 +194,10 @@ public Response getDatasetsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm" metricsSvc.save(new Metric(metricName, sanitizedyyyymm, validDataLocation, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -207,13 +207,13 @@ public Response getDatasetsPastDays(@Context UriInfo uriInfo, @PathParam("days") try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "datasetsPastDays"; if(days < 1) { - return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); + return error(BAD_REQUEST, "Invalid parameter for number of days."); } try { String validDataLocation = MetricsUtil.validateDataLocationStringType(dataLocation); @@ -226,10 +226,10 @@ public Response getDatasetsPastDays(@Context UriInfo uriInfo, @PathParam("days") metricsSvc.save(new Metric(metricName, String.valueOf(days), validDataLocation, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -245,7 +245,7 @@ public Response getDatasetsBySubjectToMonth(@Context UriInfo uriInfo, @PathParam try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "datasetsBySubjectToMonth"; @@ -261,9 +261,9 @@ public Response getDatasetsBySubjectToMonth(@Context UriInfo uriInfo, @PathParam metricsSvc.save(new Metric(metricName, sanitizedyyyymm, validDataLocation, jsonArrayString)); } - return 
allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); + return ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -287,7 +287,7 @@ public Response getFilesToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") S try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "filesToMonth"; @@ -303,9 +303,9 @@ public Response getFilesToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") S metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -315,13 +315,13 @@ public Response getFilesPastDays(@Context UriInfo uriInfo, @PathParam("days") in try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "filesPastDays"; if(days < 1) { - return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); + return error(BAD_REQUEST, "Invalid parameter for number of days."); } try { String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); @@ -333,10 +333,10 @@ public Response getFilesPastDays(@Context UriInfo uriInfo, @PathParam("days") in metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -361,7 +361,7 @@ public Response getDownloadsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "downloadsToMonth"; @@ -378,11 +378,11 @@ public Response getDownloadsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return error(BAD_REQUEST, ia.getLocalizedMessage()); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } @@ -392,13 +392,13 @@ public Response getDownloadsPastDays(@Context UriInfo uriInfo, @PathParam("days" try { errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); } catch (IllegalArgumentException ia) { - return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + return 
error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "downloadsPastDays"; if(days < 1) { - return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); + return error(BAD_REQUEST, "Invalid parameter for number of days."); } try { String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); @@ -410,10 +410,10 @@ public Response getDownloadsPastDays(@Context UriInfo uriInfo, @PathParam("days" metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } - return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + return ok(MetricsUtil.stringToJsonObjectBuilder(jsonString)); } catch (Exception ex) { - return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); + return error(BAD_REQUEST, ex.getLocalizedMessage()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 1b5ad74d61e..e7e671c6624 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -195,10 +195,9 @@ public Response search( */ return error(Response.Status.BAD_REQUEST, solrQueryResponse.getError()); } - response.setHeader("Access-Control-Allow-Origin", "*"); - return allowCors(ok(value)); + return ok(value); } else { - return allowCors(error(Response.Status.BAD_REQUEST, "q parameter is missing")); + return error(Response.Status.BAD_REQUEST, "q parameter is missing"); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index c434a99f17f..c1295f29796 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -403,8 +403,13 @@ Whether Harvesting (OAI) service is enabled /* * */ - MDCLogPath - ; + MDCLogPath, + + /** + * Allow CORS flag (true or false). It is true by default + * + */ + AllowCors; @Override public String toString() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java index 8730bf724a6..3e088c184ad 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java @@ -51,12 +51,6 @@ public void testFailIfNull_ok() throws Exception { sut.failIfNull(sut, ""); } - @Test - public void testAllowCors() { - Response r = sut.allowCors(new MockResponse(200)); - assertEquals("*", r.getHeaderString("Access-Control-Allow-Origin")); - } - @Test public void testMessagesNoJsonObject() { String message = "myMessage"; From 34e69c507ae4889b6ed3e01a9c07a6c9931e01a7 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Thu, 22 Aug 2019 11:59:32 -0400 Subject: [PATCH 34/94] add a new header for complete list of changes --- doc/release-notes/4.16-release-notes.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index ba6fb3e9ff8..601fba64662 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -51,6 +51,8 @@ In making the fix for #5687 we discovered that notifications created prior to 20 In 4.15 a new lock was added to prevent parallel edits. 
After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. +### Complete List of Changes + For the complete list of code changes in this release, see the 4.16 milestone in Github. For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. From c6b35f167b5bd2559b0796b3453134fa27438f2d Mon Sep 17 00:00:00 2001 From: Kaitlin Newson Date: Thu, 22 Aug 2019 16:45:38 -0400 Subject: [PATCH 35/94] Update 'set citation date field for a dataset' Fix typo in 'set citation date' and clarify command for passing the field type --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 330e071934f..abfdaf601dd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -406,11 +406,11 @@ Deletes the draft version of dataset ``$id``. Only the draft version can be dele Set Citation Date Field for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Sets the dataset field type to be used as the citation date for the given dataset (if the dataset does not include the dataset field type, the default logic is used). The name of the dataset field type should be sent in the body of the reqeust. +Sets the dataset field type to be used as the citation date for the given dataset (if the dataset does not include the dataset field type, the default logic is used). The name of the dataset field type should be sent in the body of the request. To revert to the default logic, use ``:publicationDate`` as the ``$datasetFieldTypeName``. Note that the dataset field used has to be a date field:: - PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey + PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey --data "$datasetFieldTypeName" Revert Citation Date Field to Default for Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From b6401addb0d351daf53bb25c0033655b9ae5d7f8 Mon Sep 17 00:00:00 2001 From: Julian Gautier Date: Thu, 22 Aug 2019 21:15:59 -0400 Subject: [PATCH 36/94] Edited Supported Metadata Export Formats section Reworded and added info to Supported Metadata Export Formats section #6111 --- doc/sphinx-guides/source/user/dataset-management.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 86d3389746d..5be6f4f697c 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -25,17 +25,19 @@ For more details about what Citation and Domain Specific Metadata is supported p Supported Metadata Export Formats --------------------------------- -Once a dataset has been published its metadata is exported in a variety of formats. A button on the dataset page's metadata tab will allow a user to export the metadata of the most recently published version of the dataset. Currently supported export formats are: +Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. 
On each dataset page's metadata tab, users can download metadata in the these metadata exports: - Dublin Core -- DDI (Data Documentation Initiative) -- DDI HTML Codebook +- DDI (Data Documentation Initiative Codebook 2.5) +- DDI HTML Codebook (A more human-readable, HTML version of the DDI Codebook 2.5 metadata export) - DataCite 4 - JSON (native Dataverse format) - OAI_ORE - OpenAIRE - Schema.org JSON-LD +Each dataset's metadata export contains the metadata of the most recently published version of the dataset. + Adding a New Dataset ==================== From 112a992803f5b0f00519b6676c31ea181bda9a50 Mon Sep 17 00:00:00 2001 From: Julian Gautier Date: Thu, 22 Aug 2019 21:27:23 -0400 Subject: [PATCH 37/94] Fixed typo Fixed typo --- doc/sphinx-guides/source/user/dataset-management.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 5be6f4f697c..bb98ecb1585 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -25,7 +25,7 @@ For more details about what Citation and Domain Specific Metadata is supported p Supported Metadata Export Formats --------------------------------- -Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. On each dataset page's metadata tab, users can download metadata in the these metadata exports: +Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. On each dataset page's metadata tab, users can download metadata in these metadata exports: - Dublin Core - DDI (Data Documentation Initiative Codebook 2.5) @@ -36,7 +36,7 @@ Once a dataset has been published, its metadata can be exported in a variety of - OpenAIRE - Schema.org JSON-LD -Each dataset's metadata export contains the metadata of the most recently published version of the dataset. +Each of these metadata exports contains the metadata of the most recently published version of the dataset. Adding a New Dataset ==================== From f6d4a73b577eb37cc1d436117df39885e54cb89b Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Fri, 23 Aug 2019 17:19:49 -0400 Subject: [PATCH 38/94] Typo fixes for API Guide [ref #6086] --- doc/sphinx-guides/source/api/auth.rst | 2 +- doc/sphinx-guides/source/api/faq.rst | 2 +- doc/sphinx-guides/source/api/getting-started.rst | 4 ++-- doc/sphinx-guides/source/api/intro.rst | 8 ++++---- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index 21e38424549..6d53e52d3b4 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -9,7 +9,7 @@ An API token is similar to a password and allows you to authenticate to Datavers How to Get an API Token ----------------------- -Your API token is unique to the server you are using. You cannot use your API token from one server on another server. +Your API token is unique to the server you are using. You cannot take your API token from one server and use it on another server. Instructions for getting a token are described in the :doc:`/user/account` section of the User Guide. 
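As a quick sketch of how a token is used once you have it (the ``demo.dataverse.org`` server and the ``contents`` endpoint below are only examples), it can be passed either as an HTTP header or as a ``key`` query parameter:

.. code-block:: bash

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

    # preferred: pass the token as a header
    curl -H "X-Dataverse-key:$API_TOKEN" https://demo.dataverse.org/api/dataverses/root/contents

    # many endpoints also accept the token as a query parameter
    curl "https://demo.dataverse.org/api/dataverses/root/contents?key=$API_TOKEN"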
diff --git a/doc/sphinx-guides/source/api/faq.rst b/doc/sphinx-guides/source/api/faq.rst index 0f0d71d775b..ec27d6e05b5 100644 --- a/doc/sphinx-guides/source/api/faq.rst +++ b/doc/sphinx-guides/source/api/faq.rst @@ -65,7 +65,7 @@ No, but there probably should be. If you have suggestions for how it should look .. _no-api: -What Funtionality is GUI Only and Not Available Via API +What Functionality is GUI Only and Not Available Via API ------------------------------------------------------- The following tasks cannot currently be automated via API because no API exists for them. The web interface should be used instead for these GUI-only features: diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index a1e957de24f..c10b29f1d85 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -22,7 +22,7 @@ Once you have identified a server to test with, create an account, click on your .. _curl-examples-and-environment-variables: -curl Examples and Enviroment Variables +curl Examples and Environment Variables -------------------------------------- The examples in this guide use `curl`_ for the following reasons: @@ -104,7 +104,7 @@ In order to download files, you must know their database IDs which you can get f Downloading Metadata ~~~~~~~~~~~~~~~~~~~~ -Dataset metadata is availabe in a variety of formats listed at :ref:`metadata-export-formats`. +Dataset metadata is available in a variety of formats listed at :ref:`metadata-export-formats`. See :ref:`export-dataset-metadata-api`. diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index fce2824faa4..5fec86fab03 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -35,7 +35,7 @@ API stands for "Application Programming Interface" and an example is Dataverse's uploader [label="DvUploader"] script [label="Script\n(Python,\nR, etc.)"] - addfilebutton [label="Add File Button"] + addfilebutton [label="Add File GUI"] addfileapi [label="Add File API"] storage [label="Storage",shape=box3d] @@ -66,7 +66,7 @@ The components above that use the "file" upload API are: - DvUploader is terminal-based application for uploading files that is described in the :doc:`/user/dataset-management` section of the User Guide. - OJS, OSF, and RSpace are all web applications that can integrate with Dataverse and are described in "Getting Data In" in the :doc:`/admin/integrations` section of the Admin Guide. -- The script in the diagram can be as simple as single line of code that is run in a terminal. You can copy and paste "one-liners" like this from the guide. See the :doc:`getting-started` section for examples using a tool called "curl". +- The script in the diagram can be as simple as a single line of code that is run in a terminal. You can copy and paste "one-liners" like this from the guide. See the :doc:`getting-started` section for examples using a tool called "curl". The diagram above shows only a few examples of software using a specific API but many more APIs are available. 
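As a hedged sketch of what such a one-liner can look like (the dataset ID, file name, server URL, and token below are placeholders; see the :doc:`native-api` section for the authoritative "add file" examples):

.. code-block:: bash

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export SERVER_URL=https://demo.dataverse.org
    export DATASET_ID=42   # placeholder database id of an existing dataset

    # upload a local file to the dataset's draft version
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@data.csv" "$SERVER_URL/api/datasets/$DATASET_ID/add"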
@@ -104,7 +104,7 @@ Power users may be researchers or curators who are comfortable with automating p Support Teams and Superusers ++++++++++++++++++++++++++++ -Support teams that answer questions about their installation of Dataverse should familiarized themselves with the :doc:`getting-started` section to get a sense of common tasks that researchers and curators might be trying to accomplish by using Dataverse APIs. +Support teams that answer questions about their installation of Dataverse should familiarize themselves with the :doc:`getting-started` section to get a sense of common tasks that researchers and curators might be trying to accomplish by using Dataverse APIs. Superusers of an installation of Dataverse have access a superuser dashboard described in the :doc:`/admin/dashboard` section of the Admin Guide but some operations can only be done via API. @@ -187,7 +187,7 @@ Lists of Dataverse APIs - Files - etc. -- :doc:`metrics`: For query statisics about usage of a Dataverse installation. +- :doc:`metrics`: For query statistics about usage of a Dataverse installation. - :doc:`sword`: For depositing data using a standards-based approach rather than the :doc:`native-api`. Please note that some APIs are only documented in other guides that are more suited to their audience: From 3878a562b670c717284e848037fffa28088ae8f8 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Fri, 23 Aug 2019 17:25:08 -0400 Subject: [PATCH 39/94] Fixed some directional references to the UI in the User Guide [ref #6086] --- doc/sphinx-guides/source/user/account.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 8b6ccfff8a3..dfb357697b6 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -74,7 +74,7 @@ Institutional log in allows you to use your log in information for your universi Create a Dataverse account using Institutional Log In ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -#. Go to “Log In” in the upper right corner of Dataverse. +#. Click the "Log In" link in the navbar. #. Select the "Your Institution" button under the "Other options" header #. Using the dropdown menu, select your institution then click the Continue button to go to your institution's log in page. #. After you put in your institutional credentials successfully, you will be brought back to Dataverse to confirm your account information, and click "Create Account". @@ -89,7 +89,7 @@ If you already have a Dataverse account associated with the Username/Email log i #. Go to the Account Information page to confirm that your account email address is the same as your institutional email address. If not, you will need to update your Dataverse account to make them match. #. Log out of Dataverse. -#. Go to “Log In” in the upper right corner of Dataverse. +#. Click the "Log In" link in the navbar. #. Select the "Your Institution" button under the "Other options" header. #. Using the dropdown menu, select your institution then click the Continue button to go to your institution's log in page. #. After you put in your institutional credentials successfully, you will be brought back to Dataverse to confirm your account information. @@ -109,7 +109,7 @@ You can set up your Dataverse account to allow you to log in using your ORCID cr Create a Dataverse account using ORCID ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -#. Go to “Log In” in the upper right corner of Dataverse. 
+#. Click the "Log In" link in the navbar. #. Click the "ORCID" button under the "Other options" header. #. Click the "Log In with ORCID" button to go to the ORCID website. #. If you do not already have an ORCID account, you can create one on this page. If you already have an ORCID account, click on "Sign in" and then enter your login under the "Personal account" tab. @@ -121,7 +121,7 @@ Convert your Dataverse account to use ORCID for log in If you already have a Dataverse account associated with the Username/Email log in option, but you want to convert it to use ORCID for log in, follow these steps: #. Log out of Dataverse. -#. Go to "Log In" in the upper right corner of Dataverse. +#. Click the "Log In" link in the navbar. #. Click the "ORCID" button under the "Other options" header. #. Click the "Log In with ORCID" button to go to the ORCID website. #. If you do not already have an ORCID account, you can create one on this page. If you already have an ORCID account, click on "Sign in" and then enter your login under the "Personal account" tab. @@ -179,12 +179,12 @@ In many cases, such as when depositing data, an API Token is required to interac How to Create Your API Token ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To create your API token, click on your name in the upper right and then click "API Token". In this tab, click "Create Token". +To create your API token, click on your account name in the navbar, then select "API Token" from the dropdown menu. In this tab, click "Create Token". How to Recreate Your API Token ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If your API Token becomes compromised or has expired, click on your name in the upper right and click "API Token". In this tab, click "Recreate Token". +If your API Token becomes compromised or has expired, click on your account name in the navbar, then select "API Token" from the dropdown menu. In this tab, click "Recreate Token". 
Additional Information about API Tokens and Dataverse APIs ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 4f5bffb18b24a617c702e2f795c195d68d1c18dd Mon Sep 17 00:00:00 2001 From: Francesco Cadili Date: Mon, 26 Aug 2019 11:59:00 +0200 Subject: [PATCH 40/94] Openaire metadata export: use TechnicalInfo as DescriptionType to express software name and version --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- .../harvard/iq/dataverse/export/OpenAireExportUtilTest.java | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 5984f705f7f..36698728bc1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1230,7 +1230,7 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion if (StringUtils.isNotBlank(softwareName) && StringUtils.isNotBlank(softwareVersion)) { description_check = writeOpenTag(xmlw, "descriptions", description_check); - writeDescriptionElement(xmlw, "Methods", softwareName + ", " + softwareVersion, language); + writeDescriptionElement(xmlw, "TechnicalInfo", softwareName + ", " + softwareVersion, language); } } } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index dfcf399bcd8..8f0bd795b8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -22,7 +22,6 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; -import static org.junit.Assert.assertTrue; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -1005,9 +1004,9 @@ public void testWriteDescriptionsElement() throws XMLStreamException, FileNotFou + "" + "DescriptionText2" + "" - + "SoftwareName1, SoftwareVersion1" + + "SoftwareName1, SoftwareVersion1" + "" - + "SoftwareName2, SoftwareVersion2" + + "SoftwareName2, SoftwareVersion2" + "" + "OriginOfSources" + "" From b2faacb465104065be33b08bbbeae4c18f6166a5 Mon Sep 17 00:00:00 2001 From: Francesco Cadili Date: Mon, 26 Aug 2019 15:12:42 +0200 Subject: [PATCH 41/94] OpenAIRE metadata export: fix how location information is expressed (production place and geospatial metadata) --- .../export/openaire/OpenAireExportUtil.java | 36 ++++++++++++++----- .../export/OpenAireExportUtilTest.java | 5 ++- 2 files changed, 29 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 5984f705f7f..58013fbbac8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1332,6 +1332,10 @@ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, DatasetVersion String geoLocationPlace = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.productionPlace); boolean geoLocations_check = false; + // write geoLocations + geoLocations_check = writeOpenTag(xmlw, "geoLocations", geoLocations_check); + writeGeolocationPlace(xmlw, geoLocationPlace, language); + // get 
DatasetFieldConstant.geographicBoundingBox for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { MetadataBlockDTO value = entry.getValue(); @@ -1340,10 +1344,10 @@ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, DatasetVersion geoLocations_check = writeOpenTag(xmlw, "geoLocations", geoLocations_check); if (fieldDTO.getMultiple()) { for (HashSet foo : fieldDTO.getMultipleCompound()) { - writeGeoLocationsElement(xmlw, foo, geoLocationPlace, language); + writeGeoLocationsElement(xmlw, foo, language); } } else { - writeGeoLocationsElement(xmlw, fieldDTO.getSingleCompound(), geoLocationPlace, language); + writeGeoLocationsElement(xmlw, fieldDTO.getSingleCompound(), language); } } } @@ -1355,22 +1359,36 @@ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, DatasetVersion /** * 18 GeoLocation (R) * + * Write geoLocationPlace inside geoLocation element + * * @param xmlw The Steam writer - * @param foo - * @param geoLocationPlace + * @param geoLocationPlace Geo location place * @param language current language * @throws XMLStreamException */ - public static void writeGeoLocationsElement(XMLStreamWriter xmlw, Set foo, String geoLocationPlace, String language) throws XMLStreamException { - //boolean geoLocations_check = false; + public static void writeGeolocationPlace(XMLStreamWriter xmlw, String geoLocationPlace, String language) throws XMLStreamException { boolean geoLocation_check = false; - boolean geoLocationbox_check = false; - + if (StringUtils.isNotBlank(geoLocationPlace)) { - geoLocation_check = writeOpenTag(xmlw, "geoLocation", geoLocation_check); writeFullElement(xmlw, null, "geoLocationPlace", null, geoLocationPlace, language); } + writeEndTag(xmlw, geoLocation_check); + } + + /** + * 18 GeoLocation (R) + * + * @param xmlw The Steam writer + * @param foo + * @param language current language + * @throws XMLStreamException + */ + public static void writeGeoLocationsElement(XMLStreamWriter xmlw, Set foo, String language) throws XMLStreamException { + //boolean geoLocations_check = false; + boolean geoLocation_check = false; + boolean geoLocationbox_check = false; + geoLocation_check = writeOpenTag(xmlw, "geoLocation", geoLocation_check); geoLocationbox_check = writeOpenTag(xmlw, "geoLocationBox", geoLocationbox_check); diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index dfcf399bcd8..9bcec777802 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -22,7 +22,6 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; -import static org.junit.Assert.assertTrue; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -1047,7 +1046,8 @@ public void testWriteGeoLocationElement() throws XMLStreamException, FileNotFoun xmlw.close(); Assert.assertEquals("" + "" - + "ProductionPlace" + + "ProductionPlace" + + "" + "" + "10" + "20" @@ -1056,7 +1056,6 @@ public void testWriteGeoLocationElement() throws XMLStreamException, FileNotFoun + "" + "" + "" - + "ProductionPlace" + "" + "80" + "70" From 9349e0812300b93d29b5f91bdbab870adaa64fc0 Mon Sep 17 00:00:00 2001 From: Francesco Cadili Date: Mon, 26 Aug 2019 17:42:32 +0200 Subject: [PATCH 42/94] Code review (variable refactor) --- .../export/openaire/OpenAireExportUtil.java | 70 +++++++++---------- 1 file changed, 35 
insertions(+), 35 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 58013fbbac8..a9f2cc45ae6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -223,13 +223,13 @@ public static void writeCreatorsElement(XMLStreamWriter xmlw, DatasetVersionDTO if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.author.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String creatorName = null; String affiliation = null; String nameIdentifier = null; String nameIdentifierScheme = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.authorName.equals(next.getTypeName())) { creatorName = next.getSinglePrimitive(); @@ -459,12 +459,12 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (DatasetFieldConstant.keyword.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String subject = null; String subjectScheme = null; String schemeURI = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.keywordValue.equals(next.getTypeName())) { subject = next.getSinglePrimitive(); @@ -487,12 +487,12 @@ public static void writeSubjectsElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (DatasetFieldConstant.topicClassification.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String subject = null; String subjectScheme = null; String schemeURI = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.topicClassValue.equals(next.getTypeName())) { subject = next.getSinglePrimitive(); @@ -575,11 +575,11 @@ public static void writeContributorsElement(XMLStreamWriter xmlw, DatasetVersion // skip non-scompound value if (DatasetFieldConstant.producer.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String producerName = null; String producerAffiliation = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.producerName.equals(next.getTypeName())) { producerName = next.getSinglePrimitive(); @@ -595,11 +595,11 @@ public static void writeContributorsElement(XMLStreamWriter xmlw, DatasetVersion } } } else if (DatasetFieldConstant.distributor.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String distributorName = null; String distributorAffiliation = null; - for (Iterator iterator = foo.iterator(); 
iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.distributorName.equals(next.getTypeName())) { distributorName = next.getSinglePrimitive(); @@ -628,11 +628,11 @@ public static void writeContributorsElement(XMLStreamWriter xmlw, DatasetVersion } } } else { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String contactName = null; String contactAffiliation = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.datasetContactName.equals(next.getTypeName())) { contactName = next.getSinglePrimitive(); @@ -649,11 +649,11 @@ public static void writeContributorsElement(XMLStreamWriter xmlw, DatasetVersion } } } else if (DatasetFieldConstant.contributor.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String contributorName = null; String contributorType = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.contributorName.equals(next.getTypeName())) { contributorName = next.getSinglePrimitive(); @@ -808,11 +808,11 @@ public static void writeDatesElement(XMLStreamWriter xmlw, DatasetVersionDTO dat if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String dateOfCollectionStart = null; String dateOfCollectionEnd = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) { dateOfCollectionStart = next.getSinglePrimitive(); @@ -892,11 +892,11 @@ public static void writeAlternateIdentifierElement(XMLStreamWriter xmlw, Dataset if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.otherId.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String alternateIdentifier = null; String alternateIdentifierType = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.otherIdValue.equals(next.getTypeName())) { alternateIdentifier = next.getSinglePrimitive(); @@ -966,12 +966,12 @@ public static void writeRelatedIdentifierElement(XMLStreamWriter xmlw, DatasetVe if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.publication.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String relatedIdentifierType = null; String relatedIdentifier = null; // is used when relatedIdentifierType variable is not URL String relatedURL = null; // is used when relatedIdentifierType variable is URL - for (Iterator iterator = 
foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.publicationIDType.equals(next.getTypeName())) { relatedIdentifierType = next.getSinglePrimitive(); @@ -1188,10 +1188,10 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.description.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String descriptionOfAbstract = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.descriptionText.equals(next.getTypeName())) { descriptionOfAbstract = next.getSinglePrimitive(); @@ -1214,11 +1214,11 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.software.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String softwareName = null; String softwareVersion = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.softwareName.equals(next.getTypeName())) { softwareName = next.getSinglePrimitive(); @@ -1265,8 +1265,8 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion // String seriesName = null; String seriesInformation = null; - Set foo = fieldDTO.getSingleCompound(); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + Set fieldDTOs = fieldDTO.getSingleCompound(); + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); /*if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) { seriesName = next.getSinglePrimitive(); @@ -1343,8 +1343,8 @@ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, DatasetVersion if (DatasetFieldConstant.geographicBoundingBox.equals(fieldDTO.getTypeName())) { geoLocations_check = writeOpenTag(xmlw, "geoLocations", geoLocations_check); if (fieldDTO.getMultiple()) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { - writeGeoLocationsElement(xmlw, foo, language); + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { + writeGeoLocationsElement(xmlw, fieldDTOs, language); } } else { writeGeoLocationsElement(xmlw, fieldDTO.getSingleCompound(), language); @@ -1380,11 +1380,11 @@ public static void writeGeolocationPlace(XMLStreamWriter xmlw, String geoLocatio * 18 GeoLocation (R) * * @param xmlw The Steam writer - * @param foo + * @param fieldDTOs * @param language current language * @throws XMLStreamException */ - public static void writeGeoLocationsElement(XMLStreamWriter xmlw, Set foo, String language) throws XMLStreamException { + public static void writeGeoLocationsElement(XMLStreamWriter xmlw, Set fieldDTOs, String language) throws XMLStreamException { //boolean geoLocations_check = false; boolean geoLocation_check = false; boolean geoLocationbox_check = false; @@ -1392,7 +1392,7 @@ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, Set geoLocation_check = writeOpenTag(xmlw, 
"geoLocation", geoLocation_check); geoLocationbox_check = writeOpenTag(xmlw, "geoLocationBox", geoLocationbox_check); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); String typeName = next.getTypeName(); @@ -1444,11 +1444,11 @@ public static void writeFundingReferencesElement(XMLStreamWriter xmlw, DatasetVe if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.grantNumber.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String awardNumber = null; String funderName = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.grantNumberValue.equals(next.getTypeName())) { awardNumber = next.getSinglePrimitive(); @@ -1471,11 +1471,11 @@ public static void writeFundingReferencesElement(XMLStreamWriter xmlw, DatasetVe } } } else if (DatasetFieldConstant.contributor.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { String contributorName = null; String contributorType = null; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.contributorName.equals(next.getTypeName())) { contributorName = next.getSinglePrimitive(); From 06166a1a463ac293bc69d6ae74a675d2d37e2b78 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 26 Aug 2019 15:21:07 -0400 Subject: [PATCH 43/94] #6100 replace settings service in Exporter for html code book --- .../edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java index fb9f0fa490b..f77821a59e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java @@ -35,7 +35,7 @@ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream try { InputStream ddiInputStream; try { - ddiInputStream = ExportService.getInstance(null).getExport(version.getDataset(), "ddi"); + ddiInputStream = ExportService.getInstance(ExportService.settingsService).getExport(version.getDataset(), "ddi"); } catch(ExportException | IOException e) { throw new ExportException ("Cannot open export_ddi cached file"); } From 634957a360d7ae0ca6a34b1abef1b1d239f60998 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 26 Aug 2019 16:33:00 -0400 Subject: [PATCH 44/94] updated wording so we can just update the list regardless of format/standard/etc. 
--- doc/sphinx-guides/source/user/dataset-management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index bb98ecb1585..6df585f5821 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -25,7 +25,7 @@ For more details about what Citation and Domain Specific Metadata is supported p Supported Metadata Export Formats --------------------------------- -Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. On each dataset page's metadata tab, users can download metadata in these metadata exports: +Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. On each dataset page's metadata tab, the following exports are available: - Dublin Core - DDI (Data Documentation Initiative Codebook 2.5) From 5aab495928e53428aaf8dddef4c83761fd540746 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 26 Aug 2019 16:39:24 -0400 Subject: [PATCH 45/94] #5613 prelim check-in --- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index ccac1b35e76..61b47c58645 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -679,6 +679,9 @@ public boolean isInheritMetadataBlockFromParent() { public void setInheritMetadataBlockFromParent(boolean inheritMetadataBlockFromParent) { dataverse.setMetadataBlockRoot(!inheritMetadataBlockFromParent); + if(!inheritMetadataBlockFromParent){ + //set to citation + } } public void editMetadataBlocks() { From 7c38eebf7982bb3dcff05e32e17f9a96a944e0e4 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 26 Aug 2019 21:54:24 -0400 Subject: [PATCH 46/94] updates from review --- doc/release-notes/4.16-release-notes.md | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index 601fba64662..282edfba94e 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -10,21 +10,21 @@ The metrics view at both the Dataset and File level has been redesigned. The mai ### HTML Codebook Export -Researchers will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook provides valuable information about the contents and structure of a dataset and will increase reusability of the datasets in Dataverse. +Users will now be able to download HTML Codebooks as an additional Dataset Export type. This codebook is a more human-readable version of the DDI Codebook 2.5 metadata export and provides valuable information about the contents and structure of a dataset and will increase reusability of the datasets in Dataverse. ### Harvesting Improvements -The Harvesting code will now better handle problematic records during incremental harvests. 
Fixing this will mean not only fewer manual interventions by installation administrators to keep harvesting running, but it will also mean researchers can more easily find and access data that is important to their research. +The Harvesting code will now better handle problematic records during incremental harvests. Fixing this will mean not only fewer manual interventions by installation administrators to keep harvesting running, but it will also mean users can more easily find and access data that is important to their research. ## Major Use Cases Newly-supported use cases in this release include: -- As a researcher, I can view the works that have cited a dataset. -- As a researcher, I can view the downloads and views for a dataset, based on the Make Data Count standard. -- As a researcher, I can export an HTML codebook for a dataset. -- As a researcher, I can expect harvested datasets to be made available more regularly. -- As a researcher, I'll encounter fewer locks as I go through the publishing process. +- As a user, I can view the works that have cited a dataset. +- As a user, I can view the downloads and views for a dataset, based on the Make Data Count standard. +- As a user, I can export an HTML codebook for a dataset. +- As a user, I can expect harvested datasets to be made available more regularly. +- As a user, I'll encounter fewer locks as I go through the publishing process. - As an installation administrator, I no longer need to destroy a PID in another system after destroying a dataset in Dataverse. ## Notes for Dataverse Installation Administrators @@ -51,6 +51,10 @@ In making the fix for #5687 we discovered that notifications created prior to 20 In 4.15 a new lock was added to prevent parallel edits. After seeing that the lock was not being released as expected, which required administrator intervention, we've adjusted this code to release the lock as expected. +### New Database Settings + +:AllowCors - Allows Cross-Origin Resource sharing(CORS). By default this setting is absent and Dataverse assumes it to be true. + ### Complete List of Changes For the complete list of code changes in this release, see the 4.16 milestone in Github. From 79c1c24b5c598a81cb2b9323b6647328c101a37f Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Mon, 26 Aug 2019 22:29:33 -0400 Subject: [PATCH 47/94] adding section for notes for Tool Developers and Integrators --- doc/release-notes/4.16-release-notes.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index 282edfba94e..44eecbc9d61 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -55,7 +55,13 @@ In 4.15 a new lock was added to prevent parallel edits. After seeing that the lo :AllowCors - Allows Cross-Origin Resource sharing(CORS). By default this setting is absent and Dataverse assumes it to be true. -### Complete List of Changes +## Notes for Tool Developers and Integrators + +### OpenAIRE Export Changes + + + +## Complete List of Changes For the complete list of code changes in this release, see the 4.16 milestone in Github. 
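One note on the :AllowCors entry introduced in the release notes above: like other database settings, it is managed through the admin settings API. The commands below are a sketch against a local installation (the localhost URL is an assumption, and the value "false" is only an example):

    # CORS is assumed to be enabled when the setting is absent; set it explicitly to disable it...
    curl -X PUT -d false http://localhost:8080/api/admin/settings/:AllowCors
    # ...and delete the setting again to fall back to the default behavior.
    curl -X DELETE http://localhost:8080/api/admin/settings/:AllowCors
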
From 47434a5abebf6a44b5e2d3772a1460af7f51a6e0 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 27 Aug 2019 10:11:15 -0400 Subject: [PATCH 48/94] #5613 allow changes to selected MDBs on DV create --- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 61b47c58645..d4fff8a2e49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -559,7 +559,7 @@ public void updateOptionsRadio(Long mdbId, Long dsftId) { public String save() { List listDFTIL = new ArrayList<>(); - if (editMode != null && editMode.equals(EditMode.INFO)) { + if (editMode != null && ( editMode.equals(EditMode.INFO) || editMode.equals(EditMode.CREATE))) { List selectedBlocks = new ArrayList<>(); if (dataverse.isMetadataBlockRoot()) { From 835ce5c25573d2b66b63576d08db819c696234a0 Mon Sep 17 00:00:00 2001 From: Julian Gautier Date: Tue, 27 Aug 2019 10:33:27 -0400 Subject: [PATCH 49/94] Added info about OpenAIRE export changes --- doc/release-notes/4.16-release-notes.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md index 44eecbc9d61..66241a42777 100644 --- a/doc/release-notes/4.16-release-notes.md +++ b/doc/release-notes/4.16-release-notes.md @@ -59,7 +59,9 @@ In 4.15 a new lock was added to prevent parallel edits. After seeing that the lo ### OpenAIRE Export Changes - +The OpenAIRE metadata export now correctly expresses information about a dataset's Production Place and GeoSpatial Bounding Box. When users add metadata to Dataverse's Production Place and GeoSpatial Bounding Box fields, those fields are now mapped to separate DataCite geoLocation properties. + +Metadata about the software name and version used to create a dataset, Software Name and Software Version, are re-mapped from DataCite's more general descriptionType="Methods" property to descriptionType="TechnicalInfo", which was added in a recent version of the DataCite schema in order to improve discoverability of metadata about the software used to create datasets. 
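The remapping described above can be spot-checked on a running installation by pulling the OpenAIRE export for a published dataset; "oai_datacite" is the exporter name Dataverse uses for this format, and the DOI below is a placeholder:

    # Fetch the OpenAIRE (DataCite-based) export and look for the new description type
    # and for the geoLocationPlace element produced from the Production Place field.
    export PID=doi:10.5072/FK2/EXAMPLE
    curl -s "http://localhost:8080/api/datasets/export?exporter=oai_datacite&persistentId=$PID" \
      | grep -E 'descriptionType="TechnicalInfo"|geoLocationPlace'
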
## Complete List of Changes From 96ae41c041978e1ca9819a2165c7c32b27c8db6e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 27 Aug 2019 10:49:31 -0400 Subject: [PATCH 50/94] #5613 remove debug code --- src/main/java/edu/harvard/iq/dataverse/DataversePage.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index d4fff8a2e49..a9ab6c16730 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -679,9 +679,6 @@ public boolean isInheritMetadataBlockFromParent() { public void setInheritMetadataBlockFromParent(boolean inheritMetadataBlockFromParent) { dataverse.setMetadataBlockRoot(!inheritMetadataBlockFromParent); - if(!inheritMetadataBlockFromParent){ - //set to citation - } } public void editMetadataBlocks() { From 651169134704e1a463bb6cf91c8b0260c4e67129 Mon Sep 17 00:00:00 2001 From: Kevin Condon Date: Wed, 28 Aug 2019 12:55:43 -0400 Subject: [PATCH 51/94] Update conf.py Update to v4.16 --- doc/sphinx-guides/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 8843186d7d9..ca279899c73 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -65,9 +65,9 @@ # built documents. # # The short X.Y version. -version = '4.15.1' +version = '4.16' # The full version, including alpha/beta/rc tags. -release = '4.15.1' +release = '4.16' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From ce84ef291454f5447280725a9a6032f7c415f425 Mon Sep 17 00:00:00 2001 From: Kevin Condon Date: Wed, 28 Aug 2019 12:56:53 -0400 Subject: [PATCH 52/94] Update versions.rst Update to v4.16 --- doc/sphinx-guides/source/versions.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index d8eed02b1aa..9535effeea2 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -6,8 +6,9 @@ Dataverse Guides Versions This list provides a way to refer to previous versions of the Dataverse guides, which we still host. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. 
-- 4.15.1 +- 4.16 +- `4.15.1 `__ - `4.15 `__ - `4.14 `__ - `4.13 `__ From ae466996ef7b054ccb407ca4c77327c495742334 Mon Sep 17 00:00:00 2001 From: Kevin Condon Date: Wed, 28 Aug 2019 12:57:56 -0400 Subject: [PATCH 53/94] Update pom.xml Update to v4.16 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 176d8d0cad4..97e6b297657 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ --> edu.harvard.iq dataverse - 4.15.1 + 4.16 war dataverse From 043d6420b272fc63bc97fa4ce3a9a7978faf111d Mon Sep 17 00:00:00 2001 From: dliburd Date: Wed, 28 Aug 2019 13:32:18 -0400 Subject: [PATCH 54/94] adding brand icon file --- .../source/_static/Dataverse_brand_icon.svg | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 doc/sphinx-guides/source/_static/Dataverse_brand_icon.svg diff --git a/doc/sphinx-guides/source/_static/Dataverse_brand_icon.svg b/doc/sphinx-guides/source/_static/Dataverse_brand_icon.svg new file mode 100644 index 00000000000..8d9da2bd09b --- /dev/null +++ b/doc/sphinx-guides/source/_static/Dataverse_brand_icon.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + From 3b19e7dace4aa666eeb218b05298d7dca9cc4165 Mon Sep 17 00:00:00 2001 From: dliburd Date: Wed, 28 Aug 2019 13:36:43 -0400 Subject: [PATCH 55/94] adding link to svg brand icon --- doc/sphinx-guides/source/style/foundations.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/style/foundations.rst b/doc/sphinx-guides/source/style/foundations.rst index 5a404f2d6eb..0f96ca28ee9 100755 --- a/doc/sphinx-guides/source/style/foundations.rst +++ b/doc/sphinx-guides/source/style/foundations.rst @@ -338,6 +338,8 @@ Create both print and web version of the Dataverse Project logo by downloading t The brand logo (below) was created as a custom icon to represent the concept of a "dataverse." It is used as the brand logo in the Bootstrap navbar component and across the application. +Create both print and web version of the Dataverse brand logo by downloading this vector-based SVG file: :download:`Dataverse_brand_icon.svg <../_static/Dataverse_brand_icon.svg>` + .. raw:: html
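The version bumps above touch three files (pom.xml, conf.py, and versions.rst); a hypothetical spot-check from the repository root confirms none of them was missed:

    # Each file changed by the release patches above should now mention 4.16.
    grep -n "4\.16" pom.xml \
        doc/sphinx-guides/source/conf.py \
        doc/sphinx-guides/source/versions.rst
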
From 7bc6c6f9ddf3bc33d64cc4d20a8a6f323e4b1b04 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 10:55:43 +0200 Subject: [PATCH 56/94] #6132. Remove orphaned Endorsed Java API specials. Since 2013, the initial commit d6868ad, added the endorsed Java API. These days, there is no reference to be found installing the resulting files. The API was not included in the WAR file. --- pom.xml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/pom.xml b/pom.xml index 97e6b297657..64b142f2812 100644 --- a/pom.xml +++ b/pom.xml @@ -11,7 +11,6 @@ war dataverse - ${project.build.directory}/endorsed UTF-8 -Xdoclint:none @@ -652,9 +651,6 @@ 1.8 1.8 - - ${endorsed.dir} - ${compilerArgument} @@ -691,26 +687,6 @@ org.apache.maven.plugins maven-dependency-plugin 3.1.1 - - - validate - - copy - - - ${endorsed.dir} - true - - - javax - javaee-endorsed-api - 7.0 - jar - - - - - org.jacoco From f41c41a51f04f5fa9017217a4a069757f0898fba Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 11:11:21 +0200 Subject: [PATCH 57/94] #6132. Add Maven better offline caching plugin to POM. Will not affect others, as it is completely unrelated to stages. Enables to use Maven in offline mode: mvn -o package See #6132 for details. --- pom.xml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pom.xml b/pom.xml index 64b142f2812..d2cc860c42d 100644 --- a/pom.xml +++ b/pom.xml @@ -688,6 +688,15 @@ maven-dependency-plugin 3.1.1 + + de.qaware.maven + go-offline-maven-plugin + 1.2.1 + + + + + org.jacoco jacoco-maven-plugin From a719ed04dcfbb7d9755b2096a75a9bb2548fe0d9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 14:43:05 +0200 Subject: [PATCH 58/94] #6132. Re-order headers of CI section in dev testing guide. Done as requested by @pdurbin on IRC. http://irclog.iq.harvard.edu/dataverse/2019-08-29#i_103746 --- doc/sphinx-guides/source/developers/testing.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index e125a858428..ec6dbf4a3c9 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -225,7 +225,7 @@ One way of generating load is by downloading many files. You can download :downl The script requires a file called ``files.txt`` to operate and database IDs for the files you want to download should each be on their own line. Continuous Integration -~~~~~~~~~~~~~~~~~~~~~~ +---------------------- The Dataverse Project currently makes use of two Continuous Integration platforms, Travis and Jenkins. @@ -236,10 +236,10 @@ Our Jenkins config is a work in progress and may be viewed at https://github.com As always, pull requests to improve our continuous integration configurations are welcome. The Phoenix Server ------------------- +~~~~~~~~~~~~~~~~~~ How the Phoenix Tests Work -~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^ A server at http://phoenix.dataverse.org has been set up to test the latest code from the develop branch. 
Testing is done using chained builds of Jenkins jobs: @@ -248,14 +248,14 @@ A server at http://phoenix.dataverse.org has been set up to test the latest code - REST Assured Tests are run across the wire from the Jenkins server to the Phoenix server: https://build.hmdc.harvard.edu:8443/job/phoenix.dataverse.org-apitest-develop/ How to Run the Phoenix Tests -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Take a quick look at http://phoenix.dataverse.org to make sure the server is up and running Dataverse. If it's down, fix it. - Log into Jenkins and click "Build Now" at https://build.hmdc.harvard.edu:8443/job/phoenix.dataverse.org-build-develop/ - Wait for all three chained Jenkins jobs to complete and note if they passed or failed. If you see a failure, open a GitHub issue or at least get the attention of some developers. List of Tests Run Against the Phoenix Server -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ We haven't thought much about a good way to publicly list the "IT" classes that are executed against the phoenix server. (Currently your best bet is to look at the ``Executing Maven`` line at the top of the "Full Log" of "Console Output" of ``phoenix.dataverse.org-apitest-develop`` Jenkins job mentioned above.) We endeavor to keep the list of tests in the "all-in-one" Docker environment described above in sync with the list of tests configured in Jenkins. That is to say, refer to :download:`run-test-suite.sh <../../../../conf/docker-aio/run-test-suite.sh>` mentioned in ``conf/docker-aio/readme.txt`` for the current list of IT tests that are expected to pass. Here's a dump of that file: From 63db85751203b04d71e73ac1359909ed2123df26 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 14:52:26 +0200 Subject: [PATCH 59/94] #6132. Add docs about offline cache plugin usage to dev guide. --- .../source/developers/testing.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index ec6dbf4a3c9..539e9be7395 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -235,6 +235,24 @@ Our Jenkins config is a work in progress and may be viewed at https://github.com As always, pull requests to improve our continuous integration configurations are welcome. +Enhance build time by caching dependencies +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In the future, CI builds in ephemeral build environments and Docker builds can benefit from caching all dependencies and plugins. +As Dataverse is a huge project, build times can be enhanced by avoiding re-downloading everything when the Maven POM is unchanged. +To seed the cache, use the following Maven goal before using Maven in (optional) offline mode in your scripts: + +.. code:: shell + + mvn de.qaware.maven:go-offline-maven-plugin:resolve-dependencies`` + mvn -o package -DskipTests + +The example above builds the WAR file without running any tests. For other scenarios: not using offline mode allows +Maven to download more dynamic dependencies, which are not easy to track, like Surefire Plugins. Overall downloads will +reduced anyway. + +You will obviously have to utilize caching functionality of your CI service or do proper Docker layering. 
+ The Phoenix Server ~~~~~~~~~~~~~~~~~~ From 5dafcde5e31eecab142c507a7a123ff0da1d0a46 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 15:39:20 +0200 Subject: [PATCH 60/94] #6134. Adding a simple hook script next to the existing (unsuitable) script. --- scripts/installer/custom-build-number-hook | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100755 scripts/installer/custom-build-number-hook diff --git a/scripts/installer/custom-build-number-hook b/scripts/installer/custom-build-number-hook new file mode 100755 index 00000000000..4e7b81950bf --- /dev/null +++ b/scripts/installer/custom-build-number-hook @@ -0,0 +1,8 @@ +#!/bin/sh +# Git changes workdir to root of repo per git documentation +BRANCH_COMMIT=$(git rev-parse --abbrev-ref HEAD)-$(git log --oneline | head -1 | awk '{print $1}') +echo "build.number=$BRANCH_COMMIT" > src/main/java/BuildNumber.properties + +# Based on https://stackoverflow.com/questions/25590267 +# $6 = previous branch, $8 is next branch +#git reflog | awk 'NR==1{ print "build.number=" $8; exit }' > src/main/java/BuildNumber.properties From 03e4886200c4fe58cb5ef2e6ce8eab92838e8421 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 15:45:53 +0200 Subject: [PATCH 61/94] #6134. Add a tip to dev guide about git hooks updating custom build number. --- doc/sphinx-guides/source/developers/tips.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index 03a98f64269..c42da3b36a8 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -150,6 +150,13 @@ Git on Mac On a Mac, you won't have git installed unless you have "Command Line Developer Tools" installed but running ``git clone`` for the first time will prompt you to install them. +Automation of custom build number on webpage +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can create symbolic links from ``.git/hooks/post-checkout`` and ``.git/hooks/post-commit`` to ``scripts/installer/custom-build-number-hook`` +to let Git automatically update ``src/main/java/BuildNumber.properties`` for you. This will result in showing branch name and +commit id in your test deployment webpages on the bottom right corner next to the version. + Sample Data ----------- From dd5e53536d0a900a142411f906f9f6cc3313ae33 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 16:14:36 +0200 Subject: [PATCH 62/94] #6134. Adding cross ref and case as requested by @pdurbin review. --- doc/sphinx-guides/source/developers/tips.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index c42da3b36a8..f03ee8df2e1 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -45,6 +45,8 @@ Ensure that Dataverse Will Be Deployed to Glassfish 4.1 Click "Window" and then "Projects". Click "File" and then "Project Properties (dataverse)". Click "Run" and change "Server" from "No Server Selected" to your installation of Glassfish 4.1. Click OK. +.. _custom_build_num_script: + Make a Small Change to the Code ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -150,13 +152,15 @@ Git on Mac On a Mac, you won't have git installed unless you have "Command Line Developer Tools" installed but running ``git clone`` for the first time will prompt you to install them. 
-Automation of custom build number on webpage +Automation of Custom Build Number on Webpage ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can create symbolic links from ``.git/hooks/post-checkout`` and ``.git/hooks/post-commit`` to ``scripts/installer/custom-build-number-hook`` to let Git automatically update ``src/main/java/BuildNumber.properties`` for you. This will result in showing branch name and commit id in your test deployment webpages on the bottom right corner next to the version. +When you prefer manual updates, there is another script, see above: :ref:`custom_build_num_script`. + Sample Data ----------- From 3753a9d74f093f38d04f515aefc3af9fad473189 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 16:23:59 +0200 Subject: [PATCH 63/94] #6134. Refactoring existing build number script to use hook script to avoid different logic. --- scripts/installer/custom-build-number | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/installer/custom-build-number b/scripts/installer/custom-build-number index abc074eba7d..d48933e6b9b 100755 --- a/scripts/installer/custom-build-number +++ b/scripts/installer/custom-build-number @@ -1,8 +1,9 @@ #!/bin/sh if [ -z "$1" ]; then - BRANCH_COMMIT=$(git rev-parse --abbrev-ref HEAD)-$(git log --oneline | head -1 | awk '{print $1}') - echo "No custom build number specified. Using $BRANCH_COMMIT" - echo "build.number=$BRANCH_COMMIT" > src/main/java/BuildNumber.properties + echo "No custom build number specified. Using branch name and commit id." + SCRIPT=$(readlink -f "$0") + SCRIPTPATH=$(dirname "$SCRIPT") + $SCRIPTPATH/custom-build-number-hook else echo "build.number=$@" > src/main/java/BuildNumber.properties fi From b14c70d9fd81ce4e70c036dc1bd473c9f64bf91a Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 16:30:28 +0200 Subject: [PATCH 64/94] #6134. Correcting mistake: need to change to git root dir before executing git hook script. --- scripts/installer/custom-build-number | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/installer/custom-build-number b/scripts/installer/custom-build-number index d48933e6b9b..b09d84c5e03 100755 --- a/scripts/installer/custom-build-number +++ b/scripts/installer/custom-build-number @@ -3,6 +3,7 @@ if [ -z "$1" ]; then echo "No custom build number specified. Using branch name and commit id." SCRIPT=$(readlink -f "$0") SCRIPTPATH=$(dirname "$SCRIPT") + cd $(git rev-parse --show-toplevel) $SCRIPTPATH/custom-build-number-hook else echo "build.number=$@" > src/main/java/BuildNumber.properties From d3641f34fd60be0654525cac0e91e9638014bced Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 16:38:19 +0200 Subject: [PATCH 65/94] #6134. Removing symlink caretaking as MacOS has no GNU version of readlink. --- scripts/installer/custom-build-number | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/installer/custom-build-number b/scripts/installer/custom-build-number index b09d84c5e03..ec2f99026a9 100755 --- a/scripts/installer/custom-build-number +++ b/scripts/installer/custom-build-number @@ -1,8 +1,7 @@ #!/bin/sh if [ -z "$1" ]; then echo "No custom build number specified. Using branch name and commit id." 
- SCRIPT=$(readlink -f "$0") - SCRIPTPATH=$(dirname "$SCRIPT") + SCRIPTPATH=$(dirname "$0") cd $(git rev-parse --show-toplevel) $SCRIPTPATH/custom-build-number-hook else From 064c6f85553699d72b413402413a7b713a137199 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 16:44:57 +0200 Subject: [PATCH 66/94] #6134. Restored complete output as requested by @pdurbin. --- scripts/installer/custom-build-number | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/installer/custom-build-number b/scripts/installer/custom-build-number index ec2f99026a9..11f753a4539 100755 --- a/scripts/installer/custom-build-number +++ b/scripts/installer/custom-build-number @@ -1,9 +1,10 @@ #!/bin/sh if [ -z "$1" ]; then - echo "No custom build number specified. Using branch name and commit id." SCRIPTPATH=$(dirname "$0") cd $(git rev-parse --show-toplevel) $SCRIPTPATH/custom-build-number-hook + NUM=$(cat src/main/java/BuildNumber.properties | cut -f2 -d=) + echo "No custom build number specified. Using \"$NUM\"." else echo "build.number=$@" > src/main/java/BuildNumber.properties fi From 6556be10a61cb03fc63861ad9d02770d7d4ae686 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 29 Aug 2019 14:37:34 -0400 Subject: [PATCH 67/94] #6014 catch duplicate email and provide meaningful error message --- src/main/java/edu/harvard/iq/dataverse/Shib.java | 7 +++++++ src/main/java/propertyFiles/Bundle.properties | 1 + 2 files changed, 8 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 8baa24067d7..d5efc9a09f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -217,6 +217,13 @@ public void init() { ShibAuthenticationProvider shibAuthProvider = new ShibAuthenticationProvider(); AuthenticatedUser au = authSvc.lookupUser(shibAuthProvider.getId(), userPersistentId); if (au != null) { + //See if there's another account with this email + AuthenticatedUser auEmail = authSvc.getAuthenticatedUserByEmail(emailAddress); + if (auEmail!= null && !auEmail.equals(au)){ + //If this email already belongs to another account throw a message for user to contact support + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("shib.duplicate.email.error")); + return; + } state = State.REGULAR_LOGIN_INTO_EXISTING_SHIB_ACCOUNT; logger.fine("Found user based on " + userPersistentId + ". Logging in."); logger.fine("Updating display info for " + au.getName()); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 338fd26c033..300ae509864 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2117,6 +2117,7 @@ shib.emailAddress.error=A single valid address could not be found. shib.nullerror=The SAML assertion for "{0}" was null. Please contact support. dataverse.shib.success=Your Dataverse account is now associated with your institutional account. shib.createUser.fail=Couldn't create user. +shib.duplicate.email.error=Email address provided matches an existing account. 
#IngestServiceBean.java ingest.failed=ingest failed From 222c19a7a550732d2dc004ca67fc498076fecb50 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 29 Aug 2019 15:01:31 -0400 Subject: [PATCH 68/94] Internationalization - Application Terms of Use - UNIQUE constraint on NULL values --- .../database/upgrades/upgrade_v4.15.1_to.sql | 17 ++++++++++------- .../harvard/iq/dataverse/settings/Setting.java | 18 ++++++++++++------ .../settings/SettingsServiceBean.java | 10 ++++++++-- .../harvard/iq/dataverse/util/BundleUtil.java | 2 +- .../iq/dataverse/util/SystemConfig.java | 12 ++++++++++-- src/main/java/propertyFiles/Bundle.properties | 1 + 6 files changed, 42 insertions(+), 18 deletions(-) diff --git a/scripts/database/upgrades/upgrade_v4.15.1_to.sql b/scripts/database/upgrades/upgrade_v4.15.1_to.sql index 61741314699..d294afe8b56 100644 --- a/scripts/database/upgrades/upgrade_v4.15.1_to.sql +++ b/scripts/database/upgrades/upgrade_v4.15.1_to.sql @@ -1,10 +1,13 @@ -ALTER TABLE setting ADD COLUMN lang text; +ALTER TABLE ONLY setting DROP CONSTRAINT setting_pkey ; + +ALTER TABLE setting ADD COLUMN ID SERIAL PRIMARY KEY; -UPDATE setting -SET lang = 'en'; +ALTER TABLE setting ADD COLUMN lang text; -ALTER TABLE ONLY setting - DROP CONSTRAINT setting_pkey ; +ALTER TABLE setting + ADD CONSTRAINT non_empty_lang + CHECK (lang <> ''); -ALTER TABLE ONLY setting - ADD CONSTRAINT setting_pkey PRIMARY KEY (name,lang); +CREATE UNIQUE INDEX unique_settings + ON setting + (name, coalesce(lang, '')); \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java index 75ca9f13660..160ed693eee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java @@ -7,6 +7,8 @@ import javax.persistence.Id; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; /** * A single value in the config of dataverse. 
@@ -14,22 +16,27 @@ */ @NamedQueries({ @NamedQuery( name="Setting.deleteByName", - query="DELETE FROM Setting s WHERE s.name=:name"), + query="DELETE FROM Setting s WHERE s.name=:name AND s.lang IS NULL"), @NamedQuery( name="Setting.findAll", query="SELECT s FROM Setting s"), + @NamedQuery( name="Setting.findByName", + query = "SELECT s FROM Setting s WHERE s.name=:name AND s.lang IS NULL" ), @NamedQuery( name="Setting.deleteByNameAndLang", - query="DELETE FROM Setting s WHERE s.name=:name AND s.lang=:lang"), + query="DELETE FROM Setting s WHERE s.name=:name AND s.lang=:lang"), @NamedQuery( name="Setting.findByNameAndLang", - query = "SELECT s FROM Setting s WHERE s.name=:name AND s.lang=:lang" ), + query = "SELECT s FROM Setting s WHERE s.name=:name AND s.lang=:lang" ) }) @Entity public class Setting implements Serializable { - + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(columnDefinition = "TEXT") private String name; - @Id @Column(columnDefinition = "TEXT") private String lang; @@ -42,7 +49,6 @@ public Setting() { public Setting(String name, String content) { this.name = name; this.content = content; - this.lang = "en"; } public Setting(String name, String lang, String content) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index a1fa13e83e7..14e4474b264 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -430,8 +430,14 @@ public String toString() { * @return the actual setting, or {@code null}. */ public String get( String name ) { - Setting s = em.find( Setting.class, name ); - return (s!=null) ? s.getContent() : null; + List tokens = em.createNamedQuery("Setting.findByName", Setting.class) + .setParameter("name", name ) + .getResultList(); + String val = null; + if(tokens.size() > 0) { + val = tokens.get(0).getContent(); + } + return (val!=null) ? val : null; } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 6b6f5f02a6b..1311a2495ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -146,7 +146,7 @@ public static String getStringFromDefaultPropertyFile(String key, String propert return getStringFromBundleNoMissingCheck(key, null, bundle); } - private static Locale getDefaultLocale() { + public static Locale getDefaultLocale() { String localeEnvVar = System.getenv().get("LANG"); if (localeEnvVar != null) { if (localeEnvVar.indexOf('.') > 0) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index f33fbb91b60..35dc00d5aff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -57,7 +57,7 @@ public class SystemConfig { public static final String FQDN = "dataverse.fqdn"; /** - * A JVM option for specifying the "official" URL of the site. + * A JVM option for specifying the "official" URL of the site. * Unlike the FQDN option above, this would be a complete URL, * with the protocol, port number etc. 
*/ @@ -491,7 +491,15 @@ public boolean isThumbnailGenerationDisabledForPDF() { public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); - String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, saneDefaultForAppTermsOfUse); + String appTermsOfUse = ""; + if(language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage()) ) + { + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); + } + else + { + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, saneDefaultForAppTermsOfUse); + } return appTermsOfUse; } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 338fd26c033..b7192bdb0ce 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2383,6 +2383,7 @@ staticSearchFields.dvObjectType=Type staticSearchFields.fileTag=File Tag staticSearchFields.fileAccess=Access staticSearchFields.publicationStatus=Publication Status +staticSearchFields.subject_ss=Subject #dataverse category - Facet Labels Researcher=Researcher From 7ad8c29344356b722100e629c2ba2441501b5959 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 29 Aug 2019 15:23:53 -0400 Subject: [PATCH 69/94] #6014 more specific error message for duplicate email address --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 300ae509864..bc3448febdf 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2117,7 +2117,7 @@ shib.emailAddress.error=A single valid address could not be found. shib.nullerror=The SAML assertion for "{0}" was null. Please contact support. dataverse.shib.success=Your Dataverse account is now associated with your institutional account. shib.createUser.fail=Couldn't create user. -shib.duplicate.email.error=Email address provided matches an existing account. +shib.duplicate.email.error=Cannot login, because the e-mail address associated with it has changed since previous login and is already in use by another account. #IngestServiceBean.java ingest.failed=ingest failed From ee28be67da35fa204adcb28e7b07e9625830c774 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 29 Aug 2019 19:10:29 -0400 Subject: [PATCH 70/94] mechanism for maintaining longer (configurable) session timeouts for logged in users, while keeping the anonymous sessions limited to the shorter default value (as specified in web.xml). 
#6035 --- .../harvard/iq/dataverse/DataverseSession.java | 15 ++++++++++++++- .../java/edu/harvard/iq/dataverse/LoginPage.java | 4 ++++ src/main/java/edu/harvard/iq/dataverse/Shib.java | 4 ++++ .../providers/builtin/DataverseUserPage.java | 1 + .../providers/oauth2/OAuth2FirstLoginPage.java | 2 ++ .../providers/oauth2/OAuth2LoginBackingBean.java | 2 ++ .../dataverse/confirmemail/ConfirmEmailPage.java | 1 + .../passwordreset/PasswordResetPage.java | 1 + .../iq/dataverse/privateurl/PrivateUrlPage.java | 1 + .../dataverse/settings/SettingsServiceBean.java | 8 +++++++- .../harvard/iq/dataverse/util/SystemConfig.java | 11 +++++++++++ src/main/webapp/WEB-INF/web.xml | 9 ++++++++- 12 files changed, 56 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java index 2b9f8cbbf60..450356f8fb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; import java.io.Serializable; import java.util.Locale; @@ -16,6 +17,7 @@ import javax.inject.Inject; import javax.inject.Named; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpSession; /** * @@ -40,6 +42,9 @@ public class DataverseSession implements Serializable{ @Inject SettingsWrapper settingsWrapper; + @EJB + SystemConfig systemConfig; + private static final Logger logger = Logger.getLogger(DataverseSession.class.getCanonicalName()); private boolean statusDismissed = false; @@ -133,6 +138,14 @@ public void updateLocaleInViewRoot() { } } - + public void configureSessionTimeout() { + HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false); + + if (httpSession != null) { + logger.info("jsession: "+httpSession.getId()+" setting the lifespan of the session to " + systemConfig.getLoginSessionTimeout() + " minutes"); + httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds + } + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index b8f2abadbad..3bd6ff08eea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -168,6 +168,10 @@ public String login() { AuthenticatedUser r = authSvc.getUpdateAuthenticatedUser(credentialsAuthProviderId, authReq); logger.log(Level.FINE, "User authenticated: {0}", r.getEmail()); session.setUser(r); + session.configureSessionTimeout(); + //HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false); + //logger.log(Level.INFO, "logged in as user "+r.getName()+", jsession id: "+httpSession.getId()+" (setting the lifespan of the session to 1 hour)"); + //httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds if ("dataverse.xhtml".equals(redirectPage)) { redirectPage = redirectToRoot(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 8baa24067d7..b5c5a58d79c 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -341,6 +341,10 @@ public String confirmAndConvertAccount() { private void logInUserAndSetShibAttributes(AuthenticatedUser au) { au.setShibIdentityProvider(shibIdp); session.setUser(au); + // TODO: Should I extend the user session timeout here? + // or is this workflow will eventually send the user to the login page, and + // the timeout will be set there? -- L.A. + session.configureSessionTimeout(); logger.fine("Groups for user " + au.getId() + " (" + au.getIdentifier() + "): " + getGroups(au)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index b370ca25a0a..0b404a446da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -329,6 +329,7 @@ public String save() { // Authenticated user registered. Save the new bulitin, and log in. builtinUserService.save(builtinUser); session.setUser(au); + session.configureSessionTimeout(); /** * @todo Move this to * AuthenticationServiceBean.createAuthenticatedUser diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java index 48258b9c0d3..213feeba8f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java @@ -183,6 +183,7 @@ public String createNewAccount() { newUser.getDisplayInfo().getPosition()); final AuthenticatedUser user = authenticationSvc.createAuthenticatedUser(newUser.getUserRecordIdentifier(), getUsername(), newAud, true); session.setUser(user); + session.configureSessionTimeout(); /** * @todo Move this to AuthenticationServiceBean.createAuthenticatedUser */ @@ -210,6 +211,7 @@ public String convertExistingAccount() { builtinUserSvc.removeUser(existingUser.getUserIdentifier()); session.setUser(existingUser); + session.configureSessionTimeout(); AuthenticationProvider newUserAuthProvider = authenticationSvc.getAuthenticationProvider(newUser.getServiceId()); JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("oauth2.convertAccount.success", Arrays.asList(newUserAuthProvider.getInfo().getTitle()))); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index 6fdc33b48b3..44bafa980fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -102,6 +102,8 @@ public void exchangeCodeForToken() throws IOException { } else { // login the user and redirect to HOME of intended page (if any). 
session.setUser(dvUser); + session.configureSessionTimeout(); + final OAuth2TokenData tokenData = oauthUser.getTokenData(); tokenData.setUser(dvUser); tokenData.setOauthProviderId(idp.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java index 823d2c111f2..45a04ba4185 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java @@ -55,6 +55,7 @@ public String init() { if (confirmEmailData != null) { user = confirmEmailData.getAuthenticatedUser(); session.setUser(user); + session.configureSessionTimeout(); // TODO: is this needed here? (it can't hurt, but still) JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("confirmEmail.details.success")); return "/dataverse.xhtml?faces-redirect=true"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java index 450374d6e66..532c0216038 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java @@ -146,6 +146,7 @@ public String resetPassword() { String builtinAuthProviderId = BuiltinAuthenticationProvider.PROVIDER_ID; AuthenticatedUser au = authSvc.lookupUser(builtinAuthProviderId, user.getUserName()); session.setUser(au); + session.configureSessionTimeout(); return "/dataverse.xhtml?alias=" + dataverseService.findRootDataverse().getAlias() + "faces-redirect=true"; } else { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, response.getMessageSummary(), response.getMessageDetail())); diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index b0658f10b34..e8bc9fc3da7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -35,6 +35,7 @@ public String init() { String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); session.setUser(privateUrlUser); + session.configureSessionTimeout(); logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index c1295f29796..0fbb9d6e9f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -409,7 +409,13 @@ Whether Harvesting (OAI) service is enabled * Allow CORS flag (true or false). 
It is true by default * */ - AllowCors; + AllowCors, + + /** + * Lifespan, in minutes, of a login user session  + * (both DataverseSession and the underlying HttpSession) + */ + LoginSessionTimeout; @Override public String toString() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b36c6e2bdec..b47bc4e785f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -92,6 +92,7 @@ public class SystemConfig { */ private static final int defaultZipUploadFilesLimit = 1000; private static final int defaultMultipleUploadFilesLimit = 1000; + private static final int defaultLoginSessionTimeout = 480; // = 8 hours private static String appVersionString = null; private static String buildNumberString = null; @@ -432,6 +433,16 @@ public int getZipUploadFilesLimit() { return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } + /** + * Session timeout, in minutes. + * (default value provided) + */ + public int getLoginSessionTimeout() { + return getIntLimitFromStringOrDefault( + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); + } + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 41a82a71952..ed6944063d1 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -131,8 +131,15 @@ + + + + + + + - 1440 + 10 From 69f0b1276cc17ffadb14fff7d175e70d331a5203 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 30 Aug 2019 15:40:03 -0400 Subject: [PATCH 71/94] #6128 remove random sleeps; add sleeps for ingest only --- .../harvard/iq/dataverse/api/AccessIT.java | 44 +++++++++++++++---- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index b35e0a44ae5..b20d6eb91d2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -108,7 +108,6 @@ public static void setUp() throws InterruptedException { tabFile1Name = "120745.dta"; tabFile1NameConvert = tabFile1Name.substring(0, tabFile1Name.indexOf(".dta")) + ".tab"; String tab1PathToFile = "scripts/search/data/tabular/" + tabFile1Name; - Thread.sleep(1000); //Added because tests are failing during setup, test is probably going too fast. Especially between first and second file Response tab1AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab1PathToFile, apiToken); tabFile1Id = JsonPath.from(tab1AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); //String origFilePid = JsonPath.from(addResponse.body().asString()).getString("data.files[0].dataFile.persistentId"); @@ -116,17 +115,36 @@ public static void setUp() throws InterruptedException { tabFile2Name = "stata13-auto.dta"; tabFile2NameConvert = tabFile2Name.substring(0, tabFile2Name.indexOf(".dta")) + ".tab"; String tab2PathToFile = "scripts/search/data/tabular/" + tabFile2Name; - Thread.sleep(1000); //Added because tests are failing during setup, test is probably going too fast. 
Especially between first and second file Response tab2AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab2PathToFile, apiToken); tabFile2Id = JsonPath.from(tab2AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); tabFile3NameRestricted = "stata13-auto-withstrls.dta"; tabFile3NameRestrictedConvert = tabFile3NameRestricted.substring(0, tabFile3NameRestricted.indexOf(".dta")) + ".tab"; String tab3PathToFile = "scripts/search/data/tabular/" + tabFile3NameRestricted; - Thread.sleep(1000); //Added because tests are failing during setup, test is probably going too fast. Especially between first and second file + + Response lockedForIngest; + int i = 0; + do { + Thread.sleep(1000); + lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); + i++; + if (i > 3) break; // only do this three times if ingest takes longer fail the test + } while (lockedForIngest.body().prettyPrint().contains("Ingest")); + + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); + Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab3PathToFile, apiToken); tabFile3IdRestricted = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Thread.sleep(3000); //Dataverse needs more time... + + i = 0; + do { + Thread.sleep(1000); + lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); + i++; + if (i > 3) break; // only do this three times if ingest takes longer fail the test + } while (lockedForIngest.body().prettyPrint().contains("Ingest")); + + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestricted.toString(), true, apiToken); restrictResponse.prettyPrint(); restrictResponse.then().assertThat() @@ -142,7 +160,6 @@ public static void setUp() throws InterruptedException { tabFile4NameUnpublished = "stata14-auto-withstrls.dta"; tabFile4NameUnpublishedConvert = tabFile4NameUnpublished.substring(0, tabFile4NameUnpublished.indexOf(".dta")) + ".tab"; String tab4PathToFile = "scripts/search/data/tabular/" + tabFile4NameUnpublished; - Thread.sleep(1000); //Added because tests are failing during setup, test is probably going too fast. Especially between first and second file Response tab4AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab4PathToFile, apiToken); tabFile4IdUnpublished = JsonPath.from(tab4AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); @@ -444,10 +461,20 @@ public void testRequestAccess() throws InterruptedException { String tabFile3NameRestrictedNew = "stata13-auto-withstrls.dta"; String tab3PathToFile = "scripts/search/data/tabular/" + tabFile3NameRestrictedNew; - Thread.sleep(1000); //Added because tests are failing during setup, test is probably going too fast. Especially between first and second file Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken); Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Thread.sleep(3000); //Dataverse needs more time... 
+ + Response lockedForIngest; + int i = 0; + do { + Thread.sleep(1000); + lockedForIngest = UtilIT.checkDatasetLocks(datasetIdNew.longValue(), "Ingest", apiToken); + i++; + if (i > 3) break; // only do this three times if ingest takes longer fail the test + } while (lockedForIngest.body().prettyPrint().contains("Ingest")); + + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); + Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken); restrictResponse.prettyPrint(); restrictResponse.then().assertThat() @@ -518,8 +545,7 @@ public void testRequestAccess() throws InterruptedException { // zipped bundle - that should have the folder hierarchy preserved. @Test public void testZipUploadAndDownload() throws IOException { - // sleep for a couple of sec. - there may still be a tab. ingest in progress: - try {Thread.sleep(3000);}catch(Exception ex){} + System.out.println("Testing round trip zip upload-and-download"); // Upload the zip file that has a mix of files with and without folders: Response uploadZipResponse = UtilIT.uploadFileViaNative(datasetId.toString(), testZipFileWithFolders, apiToken); From e0646cd99e6dccd76b9bffcb10bd9eb7b9228cf6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 3 Sep 2019 10:30:23 +0200 Subject: [PATCH 72/94] Revert " #6132. Remove orphaned Endorsed Java API specials." This reverts commit 7bc6c6f9ddf3bc33d64cc4d20a8a6f323e4b1b04. --- pom.xml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/pom.xml b/pom.xml index d2cc860c42d..caa35093a45 100644 --- a/pom.xml +++ b/pom.xml @@ -11,6 +11,7 @@ war dataverse + ${project.build.directory}/endorsed UTF-8 -Xdoclint:none @@ -651,6 +652,9 @@ 1.8 1.8 + + ${endorsed.dir} + ${compilerArgument} @@ -687,6 +691,26 @@ org.apache.maven.plugins maven-dependency-plugin 3.1.1 + + + validate + + copy + + + ${endorsed.dir} + true + + + javax + javaee-endorsed-api + 7.0 + jar + + + + + de.qaware.maven From 78054950a5ed4c437523540bc83206704db62ac3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 29 Aug 2019 10:55:43 +0200 Subject: [PATCH 73/94] Solves #6140. Remove orphaned Endorsed Java API specials. Since 2013, the initial commit d6868ad, added the endorsed Java API. These days, there is no reference to be found installing the resulting files. The API was not included in the WAR file. 
--- pom.xml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/pom.xml b/pom.xml index 97e6b297657..64b142f2812 100644 --- a/pom.xml +++ b/pom.xml @@ -11,7 +11,6 @@ war dataverse - ${project.build.directory}/endorsed UTF-8 -Xdoclint:none @@ -652,9 +651,6 @@ 1.8 1.8 - - ${endorsed.dir} - ${compilerArgument} @@ -691,26 +687,6 @@ org.apache.maven.plugins maven-dependency-plugin 3.1.1 - - - validate - - copy - - - ${endorsed.dir} - true - - - javax - javaee-endorsed-api - 7.0 - jar - - - - - org.jacoco From 2b148fc35ef022a93831f9acf356c7d00940ce92 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 3 Sep 2019 14:09:06 -0400 Subject: [PATCH 74/94] #6128 remove indexing sleeps from DatasetsIT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index a756ac474dc..c63dee9c595 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -149,12 +149,13 @@ public void testCreateDataset() { Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); assertEquals(OK.getStatusCode(), publishDataverse.getStatusCode()); - + //Remove random sleep #6128 9/3/2019 // throw in a short sleep, just in case: + /* try { Thread.sleep(1000l); } catch (InterruptedException iex) {} - + */ // ... And now that it's published, try to create a dataset again, // as the "random", not specifically authorized user: // (this time around, it should work!) @@ -341,12 +342,13 @@ public void testCreatePublishDestroyDataset() { .statusCode(403); logger.info("Attempting to publish a major version"); - + //Remove random sleep #6128 9/3/2019 // 3 second sleep, to allow the indexing to finish: + /* try { Thread.sleep(3000l); } catch (InterruptedException iex) {} - +*/ Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); @@ -484,12 +486,13 @@ public void testExport() { .statusCode(403); logger.info("In testExport; attempting to publish, as major version"); - + //Remove random sleep #6128 9/3/2019 // 3 second sleep, to allow the indexing to finish: + /* try { Thread.sleep(3000l); } catch (InterruptedException iex) {} - + */ Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); @@ -613,12 +616,13 @@ public void testExcludeEmail() { Response setToExcludeEmailFromExport = UtilIT.setSetting(SettingsServiceBean.Key.ExcludeEmailFromExport, "true"); setToExcludeEmailFromExport.then().assertThat() .statusCode(OK.getStatusCode()); - + //Remove random sleep #6128 9/3/2019 // 3 second sleep, to allow the indexing to finish: + /* try { Thread.sleep(3000l); } catch (InterruptedException iex) {} - + */ Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); @@ -713,11 +717,12 @@ public void testSequentialNumberAsIdentifierGenerationStyle() { logger.info("identifier: " + identifier); String numericPart = identifier.replace("FK2/", ""); //remove shoulder from identifier assertTrue(StringUtils.isNumeric(numericPart)); - + //Remove random sleep #6128 9/3/2019 +/* try { Thread.sleep(3000l); } catch (Exception ex) 
{logger.warning("failed to execute sleep 3 sec.");} - +*/ Response deleteDatasetResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, apiToken); deleteDatasetResponse.prettyPrint(); @@ -1729,11 +1734,12 @@ public void testUpdatePIDMetadataAPI() { Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); assertEquals(200, publishDataverse.getStatusCode()); - + //Remove random sleep #6128 9/3/2019 +/* try { Thread.sleep(3000l); } catch (InterruptedException iex){} - + */ Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); From cc222483d8d21b2b810c390a24f25b13e94e8ecf Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 3 Sep 2019 14:54:48 -0400 Subject: [PATCH 75/94] #6128 change random sleep to Ingest sleep test --- .../edu/harvard/iq/dataverse/api/EditDDIIT.java | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java index 83adf693ab6..6b8d5908cef 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java @@ -3,6 +3,8 @@ import com.jayway.restassured.RestAssured; import com.jayway.restassured.path.json.JsonPath; import com.jayway.restassured.response.Response; +import static edu.harvard.iq.dataverse.api.AccessIT.apiToken; +import static edu.harvard.iq.dataverse.api.AccessIT.datasetId; import edu.harvard.iq.dataverse.datavariable.VarGroup; @@ -32,6 +34,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import java.nio.file.Files; +import static org.junit.Assert.assertTrue; public class EditDDIIT { @@ -77,7 +80,19 @@ public void testUpdateVariableMetadata() throws InterruptedException { assertNotEquals("",origFileId); // Give file time to ingest - Thread.sleep(10000); + // Thread.sleep(10000); + + int i = 0; + Response lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); + do { + lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); + Thread.sleep(1000); + i++; + if (i > 3) { + break; // only do this three times if ingest takes longer fail the test + } + } while (lockedForIngest.body().prettyPrint().contains("Ingest") ); + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken); assertEquals(200, origXml.getStatusCode()); From 7553d358338d51029870e906842a8406ec6b8189 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 4 Sep 2019 10:46:18 -0400 Subject: [PATCH 76/94] #6128 consolidate ingest lock test code --- .../harvard/iq/dataverse/api/AccessIT.java | 34 +++---------------- .../edu/harvard/iq/dataverse/api/UtilIT.java | 25 ++++++++++++++ 2 files changed, 30 insertions(+), 29 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index b20d6eb91d2..a9f944d39e2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -122,29 +122,14 @@ public static void setUp() throws InterruptedException { tabFile3NameRestrictedConvert = tabFile3NameRestricted.substring(0, tabFile3NameRestricted.indexOf(".dta")) + ".tab"; String tab3PathToFile = "scripts/search/data/tabular/" 
+ tabFile3NameRestricted; - Response lockedForIngest; - int i = 0; - do { - Thread.sleep(1000); - lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); - i++; - if (i > 3) break; // only do this three times if ingest takes longer fail the test - } while (lockedForIngest.body().prettyPrint().contains("Ingest")); - - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000) " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 3)); Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab3PathToFile, apiToken); + tabFile3IdRestricted = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - i = 0; - do { - Thread.sleep(1000); - lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); - i++; - if (i > 3) break; // only do this three times if ingest takes longer fail the test - } while (lockedForIngest.body().prettyPrint().contains("Ingest")); - - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 3)); + Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestricted.toString(), true, apiToken); restrictResponse.prettyPrint(); restrictResponse.then().assertThat() @@ -464,16 +449,7 @@ public void testRequestAccess() throws InterruptedException { Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken); Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response lockedForIngest; - int i = 0; - do { - Thread.sleep(1000); - lockedForIngest = UtilIT.checkDatasetLocks(datasetIdNew.longValue(), "Ingest", apiToken); - i++; - if (i > 3) break; // only do this three times if ingest takes longer fail the test - } while (lockedForIngest.body().prettyPrint().contains("Ingest")); - - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, 3)); Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken); restrictResponse.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index be329c83d6d..befb5350287 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -35,6 +35,8 @@ import org.hamcrest.Matcher; import static com.jayway.restassured.path.xml.XmlPath.from; import static com.jayway.restassured.RestAssured.given; +import static edu.harvard.iq.dataverse.api.AccessIT.apiToken; +import static edu.harvard.iq.dataverse.api.AccessIT.datasetId; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -1977,6 +1979,29 @@ public void testSwordStatementWithFiles() { assertEquals("A Dataset with a File", title); } + //Helper function that returns true if a given dataset locked for a given reason is unlocked within + // a given duration returns false if still locked after given duration + static Boolean sleepForLock(long 
datasetId, String lockType, String apiToken, int duration) { + + Response lockedForIngest = UtilIT.checkDatasetLocks(datasetId, lockType, apiToken); + int i = 0; + do { + try { + lockedForIngest = UtilIT.checkDatasetLocks(datasetId, lockType, apiToken); + Thread.sleep(1000); + i++; + if (i > duration) { + break; + } + } catch (InterruptedException ex) { + Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex); + } + } while (lockedForIngest.body().prettyPrint().contains(lockType)); + + return i <= duration; + + } + static Response checkDatasetLocks(long datasetId, String lockType, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) From 752d1c27c64c88d2c0c2a508b0fa665d23449b56 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 4 Sep 2019 11:08:31 -0400 Subject: [PATCH 77/94] #6128 add constant for max ingest lock duration --- src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java | 6 +++--- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index a9f944d39e2..0837db8c5e8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -122,13 +122,13 @@ public static void setUp() throws InterruptedException { tabFile3NameRestrictedConvert = tabFile3NameRestricted.substring(0, tabFile3NameRestricted.indexOf(".dta")) + ".tab"; String tab3PathToFile = "scripts/search/data/tabular/" + tabFile3NameRestricted; - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000) " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 3)); + assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab3PathToFile, apiToken); tabFile3IdRestricted = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 3)); + assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestricted.toString(), true, apiToken); restrictResponse.prettyPrint(); @@ -449,7 +449,7 @@ public void testRequestAccess() throws InterruptedException { Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken); Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, 3)); + assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken); restrictResponse.prettyPrint(); diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index befb5350287..93e49fd2b06 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -51,6 +51,7 @@ public class UtilIT { private static final String API_TOKEN_KEY = "apiToken"; private static final String BUILTIN_USER_KEY = "burrito"; private static final String EMPTY_STRING = ""; + public static final int MAXIMUM_INGEST_LOCK_DURATION = 3; private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl(); From 2c2f70390b903bf7ab3f2dc29c3aef51f6da6608 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 4 Sep 2019 11:08:51 -0400 Subject: [PATCH 78/94] changing the scope of two of the dataverse page components to RequestScoped (needs to be reviewed) (ref #6035) --- .../iq/dataverse/ThumbnailServiceWrapper.java | 4 ++- .../search/SearchIncludeFragment.java | 7 +++++- src/main/webapp/search-include-fragment.xhtml | 25 +++++++++++++------ 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index d94c4a8c490..51d803c5e02 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.Map; import javax.ejb.EJB; +import javax.enterprise.context.RequestScoped; import javax.faces.view.ViewScoped; import javax.inject.Inject; import javax.inject.Named; @@ -32,7 +33,8 @@ * * @author Leonid Andreev */ -@ViewScoped +//@ViewScoped +@RequestScoped @Named public class ThumbnailServiceWrapper implements java.io.Serializable { @Inject diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 109c066295d..a97266aaa03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -41,6 +41,7 @@ import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; +import javax.enterprise.context.RequestScoped; import javax.faces.context.FacesContext; import javax.faces.view.ViewScoped; import javax.inject.Inject; @@ -48,7 +49,8 @@ import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; -@ViewScoped +//@ViewScoped +@RequestScoped @Named("SearchIncludeFragment") public class SearchIncludeFragment implements java.io.Serializable { @@ -195,6 +197,7 @@ public String searchRedirect(String dataverseRedirectPage) { * 5. Someday the default sort order for browse mode will be by "release * date" (newest first) but that functionality is not yet available in * the system ( see https://redmine.hmdc.harvard.edu/issues/3628 and + * * https://redmine.hmdc.harvard.edu/issues/3629 ) so for now the default * sort order for browse mode will by alphabetical (sort by name, * ascending). 
The default sort order for search mode will be by @@ -439,6 +442,8 @@ public void search(boolean onlyDataRelatedToMe) { } } } + + setDisplayCardValues(); } else { // if SOLR is down: diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 492918ce04f..770eeafb56d 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -666,18 +666,29 @@ From 8cfcc3e454323662db54c2fc75f3d0f1229c2285 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 4 Sep 2019 11:33:22 -0400 Subject: [PATCH 79/94] #6128 consolidate Ingest test code --- .../edu/harvard/iq/dataverse/api/AccessIT.java | 2 +- .../edu/harvard/iq/dataverse/api/EditDDIIT.java | 15 ++------------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 0837db8c5e8..f0ea5ffb0c4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -449,7 +449,7 @@ public void testRequestAccess() throws InterruptedException { Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken); Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)" + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); + assertTrue("Failed test if Ingest Lock exceeds max duration " + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken); restrictResponse.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java index 6b8d5908cef..1775649c3dd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java @@ -80,20 +80,9 @@ public void testUpdateVariableMetadata() throws InterruptedException { assertNotEquals("",origFileId); // Give file time to ingest - // Thread.sleep(10000); - int i = 0; - Response lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); - do { - lockedForIngest = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", apiToken); - Thread.sleep(1000); - i++; - if (i > 3) { - break; // only do this three times if ingest takes longer fail the test - } - } while (lockedForIngest.body().prettyPrint().contains("Ingest") ); - assertTrue("Failed test if Ingest Lock lasts more than sleep(3000)", i <= 3); - + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); + Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken); assertEquals(200, origXml.getStatusCode()); From 16a84d5cd7cbaa249f9c9cbd21dbfc880ca6094b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 4 Sep 2019 13:40:58 -0400 Subject: [PATCH 80/94] #6128 change sleeps to ingest tests FilesIT --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e5816b55c8d..b890f1d62b1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -7,6 +7,7 @@ import org.junit.Test; import com.jayway.restassured.path.json.JsonPath; import com.jayway.restassured.path.xml.XmlPath; +import static edu.harvard.iq.dataverse.api.AccessIT.apiToken; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -34,6 +35,7 @@ import org.junit.AfterClass; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; public class FilesIT { @@ -495,7 +497,8 @@ public void test_006_ReplaceFileGoodTabular() throws InterruptedException { .statusCode(OK.getStatusCode()); // give file time to ingest - sleep(10000); + // sleep(10000); + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken); // ddi.prettyPrint(); @@ -1198,7 +1201,9 @@ public void testUningestFileViaApi() throws InterruptedException { long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); assertNotNull(origFileId); // If checkOut fails, display message - sleep(10000); + // sleep(10000); + + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); Response uningestFileResponse = UtilIT.uningestFile(origFileId, apiToken); assertEquals(200, uningestFileResponse.getStatusCode()); } @@ -1231,8 +1236,8 @@ public void testFileMetaDataGetUpdateRoundTrip() throws InterruptedException { Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, jsonString, apiToken); Long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); - sleep(2000); //ensure tsv is consumed - + //sleep(2000); //ensure tsv is consumed + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); msg("Publish dataverse and dataset"); Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); publishDataversetResp.then().assertThat() @@ -1330,7 +1335,8 @@ public void testDataSizeInDataverse() throws InterruptedException { .statusCode(OK.getStatusCode()); // wait for it to ingest... 
- sleep(10000); + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); + // sleep(10000); // This is the magic number - the number of bytes in the 2 files uploaded // above, plus the size of the tab-delimited file generated by the ingest @@ -1375,7 +1381,8 @@ public void testValidateDDI_issue6027() throws InterruptedException { .statusCode(OK.getStatusCode()); // give file time to ingest - sleep(10000); + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); + // sleep(10000); Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken); ddi.prettyPrint(); From 5b330aa872b858eb611ecf01eae7a4c8b002f9d3 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 5 Sep 2019 09:47:33 -0400 Subject: [PATCH 81/94] renamed the sql file --- .../{upgrade_v4.15.1_to.sql => upgrade_v4.16_to_4.16.1.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename scripts/database/upgrades/{upgrade_v4.15.1_to.sql => upgrade_v4.16_to_4.16.1.sql} (100%) diff --git a/scripts/database/upgrades/upgrade_v4.15.1_to.sql b/scripts/database/upgrades/upgrade_v4.16_to_4.16.1.sql similarity index 100% rename from scripts/database/upgrades/upgrade_v4.15.1_to.sql rename to scripts/database/upgrades/upgrade_v4.16_to_4.16.1.sql From 16d73b7772d0e6537c481b8773bfd3851b1f9421 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 5 Sep 2019 11:04:10 -0400 Subject: [PATCH 82/94] documentation entry for jmap and jstat in the "tools" section of the dev. guide (#6035) --- doc/sphinx-guides/source/developers/tools.rst | 133 ++++++++++++++++++ .../edu/harvard/iq/dataverse/LoginPage.java | 3 - 2 files changed, 133 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index a7442540324..59d381d8d9e 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -145,6 +145,139 @@ For example... would be consistent with a file descriptor leak on the dataset page. +jmap and jstat +++++++++++++++ + +``jmap`` and ``jstat`` are part of the standard JDK distribution. +jmap allows you to look at the contents of the Java heap. It can be used to create a heap dump that you can then feed to another tool, such as ``Memory Analyzer Tool`` (see above). It can also be used as a useful tool on its own, for example, to list all the classes currently instantiated in memory: + +.. code-block:: bash + + $ jmap -histo + +will output a list of all classes, sorted by the number of instances of each individual class, with the size in bytes. +This can be very useful when looking for memory leaks in the application. Another useful tool is ``jstat``, which can be used in combination with ``jmap`` to monitor the effectiveness of garbage collection in reclaiming allocated memory. + +In the example script below we stress the running Dataverse application with GET requests to a specific dataverse page, use ``jmap`` to see how many Dataverse, Dataset and DataFile class objects get allocated, then run ``jstat`` to see how the numbers are affected by both "Young Generation" and "Full" garbage collection runs (``YGC`` and ``FGC`` respectively): + +(This script is provided **as an example only**!
You will have to experiment and expand it to suit any specific needs and any specific problem you may be trying to diagnose, and this is just to give an idea of how to go about it) + +.. code-block:: bash + + #!/bin/sh + + # the script takes the numeric id of the glassfish process as the command line argument: + id=$1 + + while : + do + # Access the dataverse xxx 10 times in a row: + for ((i = 0; i < 10; i++)) + do + # hide the output, standard and stderr: + curl http://localhost:8080/dataverse/xxx 2>/dev/null > /dev/null + done + + sleep 1 + + # run jmap and save the output in a temp file: + + jmap -histo $id > /tmp/jmap.histo.out + + # grep the output for Dataverse, Dataset and DataFile classes: + grep '\.Dataverse$' /tmp/jmap.histo.out + grep '\.Dataset$' /tmp/jmap.histo.out + grep '\.DataFile$' /tmp/jmap.histo.out + # (or grep for whatever else you may be interested in) + + # print the last line of the jmap output (the totals): + tail -1 /tmp/jmap.histo.out + + # run jstat to check on GC: + jstat -gcutil ${id} 1000 1 2>/dev/null + + # add a time stamp and a new line: + + date + echo + + done + +The script above will run until you stop it, and will output something like: + +.. code-block:: none + + 439: 141 28200 edu.harvard.iq.dataverse.Dataverse + 472: 160 24320 edu.harvard.iq.dataverse.Dataset + 674: 60 9600 edu.harvard.iq.dataverse.DataFile + S0 S1 E O P YGC YGCT FGC FGCT GCT + 0.00 100.00 35.32 20.15 � 7 2.145 0 0.000 2.145 + Total 108808814 5909776392 + Wed Aug 14 23:13:42 EDT 2019 + + 385: 181 36200 edu.harvard.iq.dataverse.Dataverse + 338: 320 48640 edu.harvard.iq.dataverse.Dataset + 524: 120 19200 edu.harvard.iq.dataverse.DataFile + S0 S1 E O P YGC YGCT FGC FGCT GCT + 0.00 100.00 31.69 45.11 � 9 3.693 0 0.000 3.693 + Total 167998691 9080163904 + Wed Aug 14 23:14:59 EDT 2019 + + 367: 201 40200 edu.harvard.iq.dataverse.Dataverse + 272: 480 72960 edu.harvard.iq.dataverse.Dataset + 442: 180 28800 edu.harvard.iq.dataverse.DataFile + S0 S1 E O P YGC YGCT FGC FGCT GCT + 0.00 100.00 28.05 69.94 � 11 5.001 0 0.000 5.001 + Total 226826706 12230221352 + Wed Aug 14 23:16:16 EDT 2019 + + ... etc. + +How to analyze the output, what to look for: + +First, look at the numbers in the jmap output. In the example above, you can immediately see, after the first three iterations, that every 10 dataverse page loads results in the increase of the number of Dataset classes by 160. I.e., each page load leaves 16 of these on the heap. We can also see that each of the 10 page load cycles increased the heap by roughly 3GB; that each cycle resulted in a couple of YG (young generation) garbage collections, and in the old generation allocation being almost 70% full. These numbers in the example are clearly quite high and are an indication of some problematic memory allocation by the dataverse page - if this is the result of something you have added to the page, you probably would want to investigate and fix it. However, overly generous memory use **is not the same as a leak** necessarily. What you want to see now is how much of this allocation can be reclaimed by "Full GC". If all of it gets freed by ``FGC``, it is not the end of the world (even though you do not want your system to spend too much time running ``FGC``; it costs CPU cycles, and actually freezes the application while it's in progress!). It is however a **really** serious problem, if you determine that a growing portion of the old. gen. memory (``"O"`` in the ``jmap`` output) is not getting freed, even by ``FGC``. 
This *is* a real leak now, i.e. something is leaving behind some objects that are still referenced and thus off limits to the garbage collector. So look for the lines where the ``FGC`` counter is incremented. For example, the first ``FGC`` in the example output above: + +.. code-block:: none + + 271: 487 97400 edu.harvard.iq.dataverse.Dataverse + 216: 3920 150784 edu.harvard.iq.dataverse.Dataset + 337: 372 59520 edu.harvard.iq.dataverse.DataFile + Total 277937182 15052367360 + S0 S1 E O P YGC YGCT FGC FGCT GCT + 0.00 100.00 77.66 88.15 � 17 8.734 0 0.000 8.734 + Wed Aug 14 23:20:05 EDT 2019 + + 265: 551 110200 edu.harvard.iq.dataverse.Dataverse + 202: 4080 182400 edu.harvard.iq.dataverse.Dataset + 310: 450 72000 edu.harvard.iq.dataverse.DataFile + Total 142023031 8274454456 + S0 S1 E O P YGC YGCT FGC FGCT GCT + 0.00 100.00 71.95 20.12 � 22 25.034 1 4.455 29.489 + Wed Aug 14 23:21:40 EDT 2019 + +We can see that the first ``FGC`` resulted in reducing the ``"O"`` by almost 7GB, from 15GB down to 8GB (from 88% to 20% full). The number of Dataset classes has not budged at all - it has grown by the same 160 objects as before (very suspicious!). To complicate matters, ``FGC`` does not **guarantee** to free everything that can be freed - it will balance how much memory the system needs vs. how much it is willing to spend, in terms of CPU cycles, on performing GC (remember, the application freezes while ``FGC`` is running!). So you should not assume that the "20% full" number above means that you have 20% of your heap already wasted and unrecoverable. Instead, look for the next **minimum** value of ``"O"``; then for the next, etc. Now compare these consecutive minimums. With the above test (this is the output of a real experiment, with a particularly memory-hungry feature added to the dataverse page), the sequence of minimums (of old gen. usage, in %) looked as follows: + + +.. code-block:: none + + 2.19 + 2.53 + 3.00 + 3.13 + 3.95 + 4.03 + 4.21 + 4.40 + 4.64 + 5.06 + 5.17 + etc. ... + +The sequence is clearly growing - so now we can conclude that something is indeed using memory in a way that is not recoverable, and this is a clear problem.
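+ +If you would rather not eyeball the ``jstat`` output for those post-``FGC`` minimums, a small helper along the lines of the sketch below can watch the ``O`` column for you (run it alongside the stress script above, and stop it with Ctrl-C when done). This is a hypothetical, minimal sketch rather than part of the Dataverse tooling; it assumes the 10-column ``jstat -gcutil`` layout shown above, with ``O`` in column 4 and ``FGC`` in column 8 - adjust the column numbers if your JDK prints additional columns, such as ``M`` and ``CCS``. + +.. code-block:: bash + + #!/bin/sh + # Sketch only: print the old gen. usage ("O" column) each time the Full GC + # counter ("FGC" column) increases, so the post-FGC minimums are easy to compare. + # Assumption: the 10-column jstat -gcutil layout shown above (O = $4, FGC = $8). + id=$1 + + jstat -gcutil ${id} 1000 2>/dev/null | awk ' + /S0/ { next } # skip the (repeated) header lines + NR == 2 { fgc = $8 } # remember the FGC count on the first sample + $8 > fgc { printf "post-FGC old gen: %s%% full\n", $4; fgc = $8 }'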
+ + + + ---- Previous: :doc:`making-releases` | Next: :doc:`unf/index` diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index 3bd6ff08eea..dc2ccb552e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -169,9 +169,6 @@ public String login() { logger.log(Level.FINE, "User authenticated: {0}", r.getEmail()); session.setUser(r); session.configureSessionTimeout(); - //HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false); - //logger.log(Level.INFO, "logged in as user "+r.getName()+", jsession id: "+httpSession.getId()+" (setting the lifespan of the session to 1 hour)"); - //httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds if ("dataverse.xhtml".equals(redirectPage)) { redirectPage = redirectToRoot(); From 8b8aa559672616fe5fc2085026fb3290ee031919 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Thu, 5 Sep 2019 16:01:53 -0400 Subject: [PATCH 83/94] Changed italic tags to span tags for custom icons [ref #6144] --- .../files/etc/maintenance/maintenance.xhtml | 2 +- src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/dataverse.xhtml | 4 +- src/main/webapp/dataverse_header.xhtml | 4 +- src/main/webapp/dataverseuser.xhtml | 42 +++++++++---------- src/main/webapp/mydata_fragment.xhtml | 6 +-- src/main/webapp/resources/css/structure.css | 2 +- src/main/webapp/search-include-fragment.xhtml | 8 ++-- 8 files changed, 35 insertions(+), 34 deletions(-) diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/maintenance/maintenance.xhtml b/doc/sphinx-guides/source/_static/installation/files/etc/maintenance/maintenance.xhtml index 819bc4008f4..b20dede85ae 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/maintenance/maintenance.xhtml +++ b/doc/sphinx-guides/source/_static/installation/files/etc/maintenance/maintenance.xhtml @@ -66,7 +66,7 @@