diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish.init.root
similarity index 100%
rename from doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish
rename to doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish.init.root
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish.init.service b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish.init.service
new file mode 100755
index 00000000000..32769763f5d
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/glassfish.init.service
@@ -0,0 +1,34 @@
+#! /bin/sh
+# chkconfig: 2345 80 01
+# description: GlassFish App Server
+set -e
+
+ASADMIN=/usr/local/glassfish4/bin/asadmin
+GF_USER=glassfish
+
+case "$1" in
+  start)
+	echo -n "Starting GlassFish server: glassfish"
+	# Increase file descriptor limit:
+	ulimit -n 32768
+	# Allow "memory overcommit":
+	# (basically, this allows running exec() calls from inside the
+	# app, without the Unix fork() call physically hogging 2X
+	# the amount of memory glassfish is already using)
+	echo 1 > /proc/sys/vm/overcommit_memory
+	LANG=en_US.UTF-8; export LANG
+
+	sudo -u $GF_USER $ASADMIN start-domain domain1
+	;;
+  stop)
+	echo -n "Stopping GlassFish server: glassfish"
+
+	sudo -u $GF_USER $ASADMIN stop-domain domain1
+	echo "."
+	;;
+  *)
+	echo "Usage: /etc/init.d/glassfish {start|stop}"
+
+	exit 1
+esac
+exit 0
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/glassfish.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/glassfish.service
new file mode 100644
index 00000000000..4132ad5a544
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/glassfish.service
@@ -0,0 +1,13 @@
+[Unit]
+Description = GlassFish Server v4.1
+After = syslog.target network.target
+
+[Service]
+User=glassfish
+ExecStart = /usr/bin/java -jar /usr/local/glassfish4/glassfish/lib/client/appserver-cli.jar start-domain
+ExecStop = /usr/bin/java -jar /usr/local/glassfish4/glassfish/lib/client/appserver-cli.jar stop-domain
+ExecReload = /usr/bin/java -jar /usr/local/glassfish4/glassfish/lib/client/appserver-cli.jar restart-domain
+Type = forking
+
+[Install]
+WantedBy = multi-user.target
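Either startup file above can be wired in with the platform's standard mechanism. A minimal sketch, assuming the files are copied verbatim and the ``glassfish`` service account already exists; on a systemd platform (RHEL/CentOS 7, Ubuntu 16+)::

	# cp glassfish.service /etc/systemd/system/
	# systemctl daemon-reload
	# systemctl enable glassfish.service

and on a SysV platform such as RHEL/CentOS 6::

	# cp glassfish.init.service /etc/init.d/glassfish
	# chkconfig --add glassfish
	# chkconfig glassfish on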
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 92e106edc78..d4bacd073b5 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -27,15 +27,24 @@ Execute the installer script like this::
    # cd dvinstall
    # ./install
 
-**NEW in Dataverse 4.3:** It is no longer necessary to run the installer as root!
-Just make sure the user that runs the installer has the write permission in the Glassfish directory. For example, if your Glassfish directory is owned by root, and you try to run the installer as a regular user, it's not going to work.
-(Do note, that you want the Glassfish directory to be owned by the same user that will be running Glassfish. And you most likely won't need to run it as root. The only reason to run Glassfish as root would be to have a convenient way to run the application on the default HTTP(S) ports 80 and 443, instead of 8080 and 8181. However, an easier, and more secure way to achieve that would be to instead keep Glassfish running on a high port, and hide it behind an Apache Proxy, via AJP, running on port 80. This configuration is in fact required if you choose to have your Dataverse support Shibboleth authentication. See more discussion on this here: :doc:`shibboleth`.)
+It is no longer necessary to run the installer as root!
+=======================================================
+
+Just make sure the user running the installer has write permission to:
+
+- /usr/local/glassfish4/glassfish/lib
+- /usr/local/glassfish4/glassfish/domains/domain1
+- the current working directory of the installer (it currently writes its logfile there), and
+- the directory specified by your files.dir JVM option
+
+The only reason to run Glassfish as root would be to allow Glassfish itself to listen on the default HTTP(S) ports 80 and 443, or any other port below 1024. However, it is simpler and more secure to run Glassfish on its default port of 8080 and hide it behind an Apache Proxy, via AJP, running on port 80 or 443. This configuration is required if you're going to use Shibboleth authentication. See more discussion on this here: :doc:`shibboleth`.
 
 The script will prompt you for some configuration values. If this is a test/evaluation installation, it may be possible to accept the default values provided for most of the settings:
 
 - Internet Address of your host: localhost
 - Glassfish Directory: /usr/local/glassfish4
+- Glassfish User: current user running the installer script
 - Administrator email address for this Dataverse: (none)
 - SMTP (mail) server to relay notification messages: localhost
 - Postgres Server Address: [127.0.0.1]
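For reference, the new Glassfish User setting can also be supplied on the command line instead of interactively. A sketch of a non-interactive run under a service account (the ``--gfuser`` and ``--gfdir`` flags come from the installer changes further below; values are illustrative)::

	$ cd dvinstall
	$ ./install --gfuser glassfish --gfdir /usr/local/glassfish4 -y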
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 1829c6396e8..166505f0b39 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -2,7 +2,7 @@
 Prerequisites
 =============
 
-Before running the Dataverse installation script, you must install and configure the following software, preferably on a distribution of Linux such as RHEL or its derivatives such as CentOS. After following all the steps below (which have been written based on CentOS 6), you can proceed to the :doc:`installation-main` section.
+Before running the Dataverse installation script, you must install and configure the following software, preferably on a Linux distribution such as RHEL or CentOS. After following all the steps below (which are mostly based on CentOS 6), you can proceed to the :doc:`installation-main` section.
 
 You **may** find it helpful to look at how the configuration is done
 automatically by various tools such as Vagrant, Puppet, or Ansible. See the :doc:`prep` section for pointers on
@@ -43,7 +43,7 @@ Glassfish Version 4.1 is required. There are known issues with Glassfish 4.1.1 a
 Installing Glassfish
 ====================
 
-**Important**: once Glassfish is installed, a new version of the Weld library (v2.2.10.SP1) must be downloaded and installed. This fixes a serious issue in the library supplied with Glassfish 4.1 ( see https://github.com/IQSS/dataverse/issues/647 for details). Please note that if you plan to front Glassfish with Apache you must also patch Grizzly as explained in the :doc:`shibboleth` section.
+**Note:** The Dataverse installer need not be run as root, and it is recommended that Glassfish not run as root either. We suggest the creation of a glassfish service account for this purpose.
 
 - Download and install Glassfish (installed in ``/usr/local/glassfish4`` in the example commands below)::
 
@@ -51,32 +51,45 @@ Installing Glassfish
 	 # unzip glassfish-4.1.zip
 	 # mv glassfish4 /usr/local
 
+If you intend to install and run Glassfish under a service account (and we hope you do), chown -R the Glassfish hierarchy to root to protect it but give the service account access to the below directories:
+
+- Set service account permissions::
+
+	# chown -R root:root /usr/local/glassfish4
+	# chown glassfish /usr/local/glassfish4/glassfish/lib
+	# chown -R glassfish:glassfish /usr/local/glassfish4/glassfish/domains/domain1
+
+After installation, you may chown the lib/ directory back to root; the installer only needs write access to copy the JDBC driver into that directory.
+
+Once Glassfish is installed, you'll need a newer version of the Weld library (v2.2.10.SP1) to fix a serious issue in the library supplied with Glassfish 4.1 (see https://github.com/IQSS/dataverse/issues/647 for details). If you plan to front Glassfish with Apache you must also patch Grizzly as explained in the :doc:`shibboleth` section.
+
 - Remove the stock Weld jar; download Weld v2.2.10.SP1 and install it in the modules folder::
 
 	 # cd /usr/local/glassfish4/glassfish/modules
 	 # rm weld-osgi-bundle.jar
 	 # wget http://central.maven.org/maven2/org/jboss/weld/weld-osgi-bundle/2.2.10.SP1/weld-osgi-bundle-2.2.10.SP1-glassfish4.jar
-	 # /usr/local/glassfish4/bin/asadmin start-domain
-
-- Verify the Weld version::
+- Change from ``-client`` to ``-server`` under ``-client``::
 
-	 # /usr/local/glassfish4/bin/asadmin osgi lb | grep 'Weld OSGi Bundle'
-
-- Stop Glassfish and change from ``-client`` to ``-server`` under ``-client``::
-
-	 # /usr/local/glassfish4/bin/asadmin stop-domain
 	 # vim /usr/local/glassfish4/glassfish/domains/domain1/config/domain.xml
 
 This recommendation comes from http://blog.c2b2.co.uk/2013/07/glassfish-4-performance-tuning.html among other places.
 
-Glassfish Init Script
-=====================
+- Start Glassfish and verify the Weld version::
+
+	 # /usr/local/glassfish4/bin/asadmin start-domain
+	 # /usr/local/glassfish4/bin/asadmin osgi lb | grep 'Weld OSGi Bundle'
+
+Launching Glassfish on system boot
+==================================
 
-The Dataverse installation script will start Glassfish if necessary, but while you're configuring Glassfish, you might find the following init script helpful to have Glassfish start on boot.
+The Dataverse installation script will start Glassfish if necessary, but you may find the following scripts helpful to have Glassfish start automatically on boot.
 
-Adjust this :download:`Glassfish init script <../_static/installation/files/etc/init.d/glassfish>` for your needs or write your own.
+- This :download:`Systemd file<../_static/installation/files/etc/systemd/glassfish.service>` may serve as a reference for systems using Systemd (such as RHEL/CentOS 7 or Ubuntu 16+)
+- This :download:`init script<../_static/installation/files/etc/init.d/glassfish.init.service>` may be useful for RHEL/CentOS 6 or Ubuntu >= 14 if you're using a Glassfish service account, or
+- This :download:`Glassfish init script <../_static/installation/files/etc/init.d/glassfish.init.root>` may be helpful if you're just going to run Glassfish as root.
 
-It is not necessary to have Glassfish running before you execute the Dataverse installation script because it will start Glassfish for you.
+It is not necessary for Glassfish to be running before you execute the Dataverse installation script; it will start Glassfish for you.
 
 Please note that you must run Glassfish in an English locale. If you are using something like ``LANG=de_DE.UTF-8``, ingest of tabular data will fail with the message "RoundRoutines:decimal separator no in right place".
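Before launching the installer under the service account, it may be worth spot-checking the permissions set up above, since the installer now refuses to proceed when these directories are not writable. A quick sanity check, assuming the account is named ``glassfish``::

	$ sudo -u glassfish test -w /usr/local/glassfish4/glassfish/lib && echo "lib writable"
	$ sudo -u glassfish test -w /usr/local/glassfish4/glassfish/domains/domain1 && echo "domain1 writable"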
diff --git a/scripts/database/upgrades/upgrade_v4.7.1_to_v4.7.2.sql b/scripts/database/upgrades/upgrade_v4.7.1_to_v4.7.2.sql
index cc5ca5e403a..2ec31218bc7 100644
--- a/scripts/database/upgrades/upgrade_v4.7.1_to_v4.7.2.sql
+++ b/scripts/database/upgrades/upgrade_v4.7.1_to_v4.7.2.sql
@@ -4,10 +4,12 @@ ALTER TABLE dvobject ADD COLUMN storageidentifier character varying(255);
 UPDATE dvobject
 SET storageidentifier=(SELECT datafile.filesystemname
 FROM datafile
-WHERE datafile.id=dvobject.id AND dvobject.dtype='DataFile');
+WHERE datafile.id=dvobject.id AND dvobject.dtype='DataFile') WHERE dvobject.dtype='DataFile';
 
 UPDATE dvobject
 SET storageidentifier=(select concat('file://',authority::text,ds.doiseparator::text,ds.identifier::text) FROM dataset ds WHERE dvobject.id=ds.id)
-WHERE storageidentifier IS NULL;
\ No newline at end of file
+WHERE storageidentifier IS NULL;
+
+ALTER TABLE datafile DROP COLUMN filesystemname;
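After the upgrade script has been applied, one way to confirm that every DataFile and Dataset picked up a ``storageidentifier`` is to count the rows still NULL; a sketch, with the database name ``dvndb`` being illustrative::

	$ psql dvndb -c "SELECT dtype, COUNT(*) FROM dvobject WHERE storageidentifier IS NULL GROUP BY dtype;"

Only ``Dataverse`` rows are expected to remain NULL; ``DataFile`` and ``Dataset`` rows are covered by the two UPDATE statements.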
diff --git a/scripts/installer/install b/scripts/installer/install
index 3abbae96348..a620cb00eaa 100755
--- a/scripts/installer/install
+++ b/scripts/installer/install
@@ -9,6 +9,7 @@ use File::Copy;
 my $verbose;
 my $pg_only;
 my $hostname;
+my $gfuser;
 my $gfdir;
 my $mailserver;
 my $yes;
@@ -21,6 +22,7 @@ my ($rez) = GetOptions(
     "verbose" => \$verbose,
     "pg_only" => \$pg_only,
     "hostname=s" => \$hostname,
+    "gfuser=s" => \$gfuser,
     "gfdir=s" => \$gfdir,
     "mailserver=s" => \$mailserver,
     "y|yes" => \$yes,
@@ -45,6 +47,7 @@ else
 
 @CONFIG_VARIABLES = (
     'HOST_DNS_ADDRESS',
+    'GLASSFISH_USER',
     'GLASSFISH_DIRECTORY',
     'ADMIN_EMAIL',
     'MAIL_SERVER',
@@ -70,7 +73,8 @@ else
 
 my %CONFIG_DEFAULTS = (
     'HOST_DNS_ADDRESS', 'localhost',
+    'GLASSFISH_USER', '',
     'GLASSFISH_DIRECTORY', '/usr/local/glassfish4',
     'ADMIN_EMAIL', '',
     'MAIL_SERVER', 'mail.hmdc.harvard.edu',
@@ -93,6 +98,7 @@ my %CONFIG_DEFAULTS = (
 );
 
 my %CONFIG_PROMPTS = (
     'HOST_DNS_ADDRESS', 'Fully Qualified Domain Name of your host',
+    'GLASSFISH_USER', 'Glassfish service account username',
     'GLASSFISH_DIRECTORY', 'Glassfish Directory',
     'ADMIN_EMAIL', 'Administrator email address for this Dataverse',
     'MAIL_SERVER', 'SMTP (mail) server to relay notification messages',
@@ -118,6 +124,7 @@ my %CONFIG_PROMPTS = (
 
 my %CONFIG_COMMENTS = (
     'HOST_DNS_ADDRESS', ":\n(enter numeric IP address, if FQDN is unavailable) ",
+    'GLASSFISH_USER', ":\nThis user will be running Glassfish service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest \"glassfish\" or another unprivileged user\n: ",
     'GLASSFISH_DIRECTORY', '',
     'ADMIN_EMAIL', ":\n(please enter a valid email address!) ",
     'MAIL_SERVER', '',
@@ -202,6 +209,37 @@ else
 {
     print("using hard-coded default configuration values ($config_default_file not found)\n");
 }
 
+# get current user. first one wins.
+my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<);
+
+if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'}) {
+    $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user;
+    print "No pre-configured user found; using $current_user.\n";
+}
+
+# command-line argument takes precedence
+if ($gfuser) {
+    print "Using CLI-specified user $gfuser.\n";
+    $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser;
+}
+
+# prefer that we not install as root.
+unless ( $< != 0 ) {
+print "####################################################################\n";
+print "     It is recommended that this script not be run as root.\n";
+print " Consider creating a glassfish service account, giving it ownership\n";
+print "  on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n";
+print "    along with the JVM-specified files.dir location, and running\n";
+print "       this installer as the user who will launch Glassfish.\n";
+print "####################################################################\n";
+}
+
+# ensure the configured Glassfish user exists or bail
+my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'}";
+my $gfreturncode=system($gfidcmd);
+if ($gfreturncode != 0) {
+    die "Couldn't find user $CONFIG_DEFAULTS{'GLASSFISH_USER'}. Please ensure the account exists and is readable by the user running this installer.\n";
+}
 
 if ($mailserver) {
     $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver;
@@ -358,6 +396,15 @@ for my $ENTRY (@CONFIG_VARIABLES)
     # TODO:
     # verify that we can write in the Glassfish directory
     # (now that we are no longer requiring to run the installer as root)
+
+    my $g_testdir = $g_dir . "/glassfish/domains/domain1";
+    my $g_libdir = $g_dir . "/glassfish/lib";
+    if (!(-w $g_testdir)) {
+	die("$g_testdir not writable. Have you created a glassfish user, and given it write permission on $g_testdir?\n");
+    } elsif (!(-w $g_libdir)) {
+	die("$g_libdir not writable. Have you created a glassfish user, and given it write permission on $g_libdir?\n");
+    }
+
 }
 
 print "OK!\n";
@@ -1011,7 +1058,7 @@ my $DOMAIN_DOWN =
 print STDERR $DOMAIN_DOWN . "\n";
 if ($DOMAIN_DOWN)
 {
     print "Trying to start domain up...\n";
-    system( $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
+    system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
     # TODO: (?) - retest that the domain is running now?
 }
diff --git a/scripts/vagrant/install-dataverse.sh b/scripts/vagrant/install-dataverse.sh
index cd6aba66401..ac482171539 100644
--- a/scripts/vagrant/install-dataverse.sh
+++ b/scripts/vagrant/install-dataverse.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+
 if [ ! -z "$1" ]; then
   MAILSERVER=$1
   MAILSERVER_ARG="--mailserver $MAILSERVER"
@@ -17,12 +18,16 @@ pid=$$
 if [ -e default.config ]; then
   mv default.config tmp-${pid}-default.config
 fi
+
 echo "HOST_DNS_ADDRESS localhost" > default.config
 echo "GLASSFISH_DIRECTORY /home/glassfish/glassfish4" >> default.config
+
 if [ ! 
-z "$MAILSERVER" ]; then echo "MAIL_SERVER $MAILSERVER" >> default.config fi + ./install -y -f + if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back mv -f tmp-${pid}-default.config default.config fi diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index e09ff3df3b1..4273f00a4c5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -10,7 +10,7 @@ import edu.harvard.iq.dataverse.api.WorldMapRelatedData; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.ingest.IngestReport; import edu.harvard.iq.dataverse.ingest.IngestRequest; @@ -82,10 +82,10 @@ public class DataFile extends DvObject implements Comparable { private String contentType; - @Expose - @SerializedName("storageIdentifier") - @Column( nullable = false ) - private String fileSystemName; +// @Expose +// @SerializedName("storageIdentifier") +// @Column( nullable = false ) +// private String fileSystemName; /** * End users will see "SHA-1" (with a hyphen) rather than "SHA1" in the GUI @@ -438,13 +438,13 @@ public void setOwner(Dataset dataset) { super.setOwner(dataset); } - public String getStorageIdentifier() { - return this.fileSystemName; - } - - public void setStorageIdentifier(String storageIdentifier) { - this.fileSystemName = storageIdentifier; - } +// public String getStorageIdentifier() { +// return this.fileSystemName; +// } +// +// public void setStorageIdentifier(String storageIdentifier) { +// this.fileSystemName = storageIdentifier; +// } public String getDescription() { FileMetadata fmd = getLatestFileMetadata(); @@ -555,14 +555,14 @@ public String getOriginalChecksumType() { return BundleUtil.getStringFromBundle("file.originalChecksumType", Arrays.asList(this.checksumType.toString()) ); } - public DataFileIO getDataFileIO() throws IOException { - DataFileIO dataFileIO = DataAccess.getDataFileIO(this); + public StorageIO getStorageIO() throws IOException { + StorageIO storageIO = DataAccess.getStorageIO(this); - if (dataFileIO == null) { - throw new IOException("Failed to create DataFileIO for datafile."); + if (storageIO == null) { + throw new IOException("Failed to create storageIO for datafile."); } - return dataFileIO; + return storageIO; } /* diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 0599f508ddd..53f946ae65e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -199,8 +199,8 @@ public List findAllRelatedByRootDatafileId(Long datafileId) { public DataFile findByStorageIdandDatasetVersion(String storageId, DatasetVersion dv) { try { - Query query = em.createNativeQuery("select o.id from datafile o, filemetadata m " + - "where o.filesystemname = '" + storageId + "' and o.id = m.datafile_id and m.datasetversion_id = " + + Query query = em.createNativeQuery("select o.id from dvobject o, filemetadata m " + + "where o.storageidentifier = '" + storageId + "' and o.id = m.datafile_id and m.datasetversion_id = " + dv.getId() + ""); query.setMaxResults(1); if (query.getResultList().size() 
< 1) { @@ -314,7 +314,7 @@ public DataFile findCheapAndEasy(Long id) { Object[] result = null; try { - result = (Object[]) em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t0.PREVIEWIMAGEAVAILABLE, t1.CONTENTTYPE, t1.FILESYSTEMNAME, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t3.ID, t3.AUTHORITY, t3.IDENTIFIER, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID FROM DVOBJECT t0, DATAFILE t1, DVOBJECT t2, DATASET t3 WHERE ((t0.ID = " + id + ") AND (t0.OWNER_ID = t2.ID) AND (t2.ID = t3.ID) AND (t1.ID = t0.ID))").getSingleResult(); + result = (Object[]) em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t0.PREVIEWIMAGEAVAILABLE, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t3.ID, t3.AUTHORITY, t3.IDENTIFIER, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID FROM DVOBJECT t0, DATAFILE t1, DVOBJECT t2, DATASET t3 WHERE ((t0.ID = " + id + ") AND (t0.OWNER_ID = t2.ID) AND (t2.ID = t3.ID) AND (t1.ID = t0.ID))").getSingleResult(); } catch (Exception ex) { return null; } @@ -569,7 +569,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion i = 0; - List fileResults = em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t1.CONTENTTYPE, t1.FILESYSTEMNAME, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID FROM DVOBJECT t0, DATAFILE t1 WHERE ((t0.OWNER_ID = " + owner.getId() + ") AND ((t1.ID = t0.ID) AND (t0.DTYPE = 'DataFile'))) ORDER BY t0.ID").getResultList(); + List fileResults = em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID FROM DVOBJECT t0, DATAFILE t1 WHERE ((t0.OWNER_ID = " + owner.getId() + ") AND ((t1.ID = t0.ID) AND (t0.DTYPE = 'DataFile'))) ORDER BY t0.ID").getResultList(); for (Object[] result : fileResults) { Integer file_id = (Integer) result[0]; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d73c37f593a..231467dc193 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5,7 +5,7 @@ import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; @@ -414,9 +414,9 @@ public String getSwiftContainerName(){ String swiftContainerName; try { - DataFileIO dataFileIO = 
getInitialDataFile().getDataFileIO(); + StorageIO storageIO = getInitialDataFile().getStorageIO(); try { - SwiftAccessIO swiftIO = (SwiftAccessIO) dataFileIO; + SwiftAccessIO swiftIO = (SwiftAccessIO) storageIO; swiftIO.open(); swiftContainerName = swiftIO.getSwiftContainerName(); logger.info("Swift container name: " + swiftContainerName); diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 7e3f1bbccd1..63113a66f09 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -9,7 +9,7 @@ import edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datasetutility.TwoRavensHelper; import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper; import edu.harvard.iq.dataverse.engine.command.Command; @@ -573,9 +573,9 @@ public Boolean isSwiftStorage () { public String getSwiftContainerName(){ String swiftContainerName; try { - DataFileIO dataFileIO = getFile().getDataFileIO(); + StorageIO storageIO = getFile().getStorageIO(); try { - SwiftAccessIO swiftIO = (SwiftAccessIO) dataFileIO; + SwiftAccessIO swiftIO = (SwiftAccessIO) storageIO; swiftIO.open(); swiftContainerName = swiftIO.getSwiftContainerName(); logger.info("Swift container name: " + swiftContainerName); @@ -680,11 +680,11 @@ public boolean isPubliclyDownloadable() { public String getPublicDownloadUrl() { try { - DataFileIO dataFileIO = getFile().getDataFileIO(); - if (dataFileIO instanceof SwiftAccessIO) { + StorageIO storageIO = getFile().getStorageIO(); + if (storageIO instanceof SwiftAccessIO) { String fileDownloadUrl = null; try { - SwiftAccessIO swiftIO = (SwiftAccessIO) dataFileIO; + SwiftAccessIO swiftIO = (SwiftAccessIO) storageIO; swiftIO.open(); fileDownloadUrl = swiftIO.getRemoteUrl(); logger.info("Swift url: " + fileDownloadUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java index f88f1fa583a..545c42b4495 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java @@ -442,6 +442,8 @@ public void saveClient(ActionEvent ae) { // see the comment above, about the day of week. same here. 
} harvestingClient.setScheduleHourOfDay(getHourOfDay()); + } else { + harvestingClient.setScheduled(false); } // will try to save it now: diff --git a/src/main/java/edu/harvard/iq/dataverse/MapLayerMetadataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MapLayerMetadataServiceBean.java index d079dfefba8..dbbe8b3d8bb 100755 --- a/src/main/java/edu/harvard/iq/dataverse/MapLayerMetadataServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MapLayerMetadataServiceBean.java @@ -10,7 +10,7 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.worldmapauth.WorldMapToken; import edu.harvard.iq.dataverse.worldmapauth.WorldMapTokenServiceBean; @@ -182,14 +182,14 @@ private boolean deleteOlderMapThumbnails(DataFile dataFile) { try { - DataFileIO dataFileIO = dataFile.getDataFileIO(); + StorageIO storageIO = dataFile.getStorageIO(); - if (dataFileIO == null) { - logger.warning("Null DataFileIO in deleteOlderMapThumbnails()"); + if (storageIO == null) { + logger.warning("Null storageIO in deleteOlderMapThumbnails()"); return false; } - dataFileIO.open(); - List cachedObjectsTags = dataFileIO.listAuxObjects(); + storageIO.open(); + List cachedObjectsTags = storageIO.listAuxObjects(); if (cachedObjectsTags != null) { String iconBaseTag = "img"; @@ -198,13 +198,13 @@ private boolean deleteOlderMapThumbnails(DataFile dataFile) { logger.info("found AUX tag: "+cachedFileTag); if (iconBaseTag.equals(cachedFileTag) || cachedFileTag.startsWith(iconThumbTagPrefix)) { logger.info("deleting cached AUX object "+cachedFileTag); - dataFileIO.deleteAuxObject(cachedFileTag); + storageIO.deleteAuxObject(cachedFileTag); } } } /* * Below is the old-style code that was assuming that all the files are - * stored on a local filesystem. The DataFileIO code, above, should + * stored on a local filesystem. The StorageIO code, above, should * be used instead for all the operations on the physical files associated * with DataFiles. 
// Get the parent directory @@ -289,9 +289,9 @@ public boolean retrieveMapImageForIcon(MapLayerMetadata mapLayerMetadata) throws imageUrl = imageUrl.replace("https:", "http:"); logger.info("Attempt to retrieve map image: " + imageUrl); - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try { - dataAccess = mapLayerMetadata.getDataFile().getDataFileIO(); + dataAccess = mapLayerMetadata.getDataFile().getStorageIO(); } catch (IOException ioEx) { dataAccess = null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 4f91dbc7c7e..7f649c67d3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -6,7 +6,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; @@ -199,13 +199,13 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId) { } String cardImageUrl = null; - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try{ - dataAccess = DataAccess.getDataFileIO(dataset); + dataAccess = DataAccess.getStorageIO(dataset); } catch(IOException ioex){ - return null; + // return null; } InputStream in = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index bd659d0f69e..53de5a4663e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -27,7 +27,7 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.DataFileZipper; import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -576,10 +576,10 @@ public InputStream fileCardImage(@PathParam("fileId") Long fileId, @Context UriI return null; } - DataFileIO thumbnailDataAccess = null; + StorageIO thumbnailDataAccess = null; try { - DataFileIO dataAccess = df.getDataFileIO(); + StorageIO dataAccess = df.getStorageIO(); if (dataAccess != null) { // && dataAccess.isLocalFile()) { dataAccess.open(); @@ -617,7 +617,7 @@ public InputStream dsCardImage(@PathParam("versionId") Long versionId, @Context } //String imageThumbFileName = null; - DataFileIO thumbnailDataAccess = null; + StorageIO thumbnailDataAccess = null; // First, check if this dataset has a designated thumbnail image: @@ -627,7 +627,7 @@ public InputStream dsCardImage(@PathParam("versionId") Long versionId, @Context if (logoDataFile != null) { try { - DataFileIO dataAccess = logoDataFile.getDataFileIO(); + StorageIO dataAccess = logoDataFile.getStorageIO(); if (dataAccess != null) { // && dataAccess.isLocalFile()) { dataAccess.open(); thumbnailDataAccess = ImageThumbConverter.getImageThumbnailAsInputStream(dataAccess, 48); @@ -706,7 +706,7 @@ public InputStream 
dvCardImage(@PathParam("dataverseId") Long dataverseId, @Cont // And we definitely don't want to be doing this for harvested // dataverses: /* - DataFileIO thumbnailDataAccess = null; + StorageIO thumbnailDataAccess = null; if (!dataverse.isHarvested()) { for (Dataset dataset : datasetService.findPublishedByOwnerId(dataverseId)) { @@ -736,9 +736,9 @@ public InputStream dvCardImage(@PathParam("dataverseId") Long dataverseId, @Cont // is too expensive! Instead we are now selecting an available thumbnail and // giving the dataset card a direct link to that file thumbnail. -- L.A., 4.2.2 /* - private DataFileIO getThumbnailForDatasetVersion(DatasetVersion datasetVersion) { + private StorageIO getThumbnailForDatasetVersion(DatasetVersion datasetVersion) { logger.info("entering getThumbnailForDatasetVersion()"); - DataFileIO thumbnailDataAccess = null; + StorageIO thumbnailDataAccess = null; if (datasetVersion != null) { List fileMetadatas = datasetVersion.getFileMetadatas(); @@ -749,7 +749,7 @@ private DataFileIO getThumbnailForDatasetVersion(DatasetVersion datasetVersion) if (dataFile != null && dataFile.isImage()) { try { - DataFileIO dataAccess = dataFile.getDataFileIO(); + StorageIO dataAccess = dataFile.getStorageIO(); if (dataAccess != null && dataAccess.isLocalFile()) { dataAccess.open(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java index cd3fee51fbd..c306da51231 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java @@ -56,7 +56,7 @@ public void writeTo(BundleDownloadInstance di, Class clazz, Type type, Annota if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) { DataAccessRequest daReq = new DataAccessRequest(); DataFile sf = di.getDownloadInfo().getDataFile(); - DataFileIO accessObject = DataAccess.getDataFileIO(sf, daReq); + StorageIO accessObject = DataAccess.getStorageIO(sf, daReq); if (accessObject != null) { accessObject.open(); @@ -94,7 +94,7 @@ public void writeTo(BundleDownloadInstance di, Class clazz, Type type, Annota // Now, the original format: String origFormat = null; try { - DataFileIO accessObjectOrig = StoredOriginalFile.retreive(accessObject); //.retrieve(sf, (FileAccessIO) accessObject); + StorageIO accessObjectOrig = StoredOriginalFile.retreive(accessObject); //.retrieve(sf, (FileAccessIO) accessObject); if (accessObjectOrig != null) { instream = accessObjectOrig.getInputStream(); if (instream != null) { @@ -127,7 +127,7 @@ public void writeTo(BundleDownloadInstance di, Class clazz, Type type, Annota // add an RData version: if (!"application/x-rlang-transport".equals(origFormat)) { try { - DataFileIO accessObjectRdata = DataConverter.performFormatConversion(sf, accessObject, + StorageIO accessObjectRdata = DataConverter.performFormatConversion(sf, accessObject, "RData", "application/x-rlang-transport"); if (accessObjectRdata != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index 8dd56a14c9b..de033c33651 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -64,36 +64,36 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] DataFile dataFile = 
di.getDownloadInfo().getDataFile(); - DataFileIO dataFileIO = DataAccess.getDataFileIO(dataFile, daReq); + StorageIO storageIO = DataAccess.getStorageIO(dataFile, daReq); - if (dataFileIO != null) { - dataFileIO.open(); + if (storageIO != null) { + storageIO.open(); if (di.getConversionParam() != null) { // Image Thumbnail and Tabular data conversion: // NOTE: only supported on local files, as of 4.0.2! - // NOTE: should be supported on all files for which DataFileIO drivers + // NOTE: should be supported on all files for which StorageIO drivers // are available (but not on harvested files1) -- L.A. 4.6.2 if (di.getConversionParam().equals("imageThumb") && !dataFile.isHarvested()) { if ("".equals(di.getConversionParamValue())) { - dataFileIO = ImageThumbConverter.getImageThumbnailAsInputStream(dataFileIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); + storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); } else { try { int size = new Integer(di.getConversionParamValue()).intValue(); if (size > 0) { - dataFileIO = ImageThumbConverter.getImageThumbnailAsInputStream(dataFileIO, size); + storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, size); } } catch (java.lang.NumberFormatException ex) { - dataFileIO = ImageThumbConverter.getImageThumbnailAsInputStream(dataFileIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); + storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); } // and, since we now have tabular data files that can // have thumbnail previews... obviously, we don't want to // add the variable header to the image stream! - dataFileIO.setNoVarHeader(Boolean.TRUE); - dataFileIO.setVarHeader(null); + storageIO.setNoVarHeader(Boolean.TRUE); + storageIO.setVarHeader(null); } } else if (dataFile.isTabularData()) { logger.fine("request for tabular data download;"); @@ -104,16 +104,16 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] if (di.getConversionParam().equals("noVarHeader")) { logger.fine("tabular data with no var header requested"); - dataFileIO.setNoVarHeader(Boolean.TRUE); - dataFileIO.setVarHeader(null); + storageIO.setNoVarHeader(Boolean.TRUE); + storageIO.setVarHeader(null); } else if (di.getConversionParam().equals("format")) { // Conversions, and downloads of "stored originals" are - // now supported on all DataFiles for which DataFileIO + // now supported on all DataFiles for which StorageIO // access drivers are available. 
if ("original".equals(di.getConversionParamValue())) { logger.fine("stored original of an ingested file requested"); - dataFileIO = StoredOriginalFile.retreive(dataFileIO); + storageIO = StoredOriginalFile.retreive(storageIO); } else { // Other format conversions: logger.fine("format conversion on a tabular file requested ("+di.getConversionParamValue()+")"); @@ -123,9 +123,9 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // (this shouldn't happen in real life - but just in case): requestedMimeType = "application/octet-stream"; } - dataFileIO = + storageIO = DataConverter.performFormatConversion(dataFile, - dataFileIO, + storageIO, di.getConversionParamValue(), requestedMimeType); } } else if (di.getConversionParam().equals("subset")) { @@ -165,7 +165,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] try { File tempSubsetFile = File.createTempFile("tempSubsetFile", ".tmp"); TabularSubsetGenerator tabularSubsetGenerator = new TabularSubsetGenerator(); - tabularSubsetGenerator.subsetFile(dataFileIO.getInputStream(), tempSubsetFile.getAbsolutePath(), variablePositionIndex, dataFile.getDataTable().getCaseQuantity(), "\t"); + tabularSubsetGenerator.subsetFile(storageIO.getInputStream(), tempSubsetFile.getAbsolutePath(), variablePositionIndex, dataFile.getDataTable().getCaseQuantity(), "\t"); if (tempSubsetFile.exists()) { FileInputStream subsetStream = new FileInputStream(tempSubsetFile); @@ -176,7 +176,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] subsetVariableHeader = subsetVariableHeader.concat("\n"); subsetStreamIO.setVarHeader(subsetVariableHeader); - String tabularFileName = dataFileIO.getFileName(); + String tabularFileName = storageIO.getFileName(); if (tabularFileName != null && tabularFileName.endsWith(".tab")) { tabularFileName = tabularFileName.replaceAll("\\.tab$", "-subset.tab"); @@ -187,13 +187,13 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] } subsetStreamIO.setFileName(tabularFileName); - subsetStreamIO.setMimeType(dataFileIO.getMimeType()); - dataFileIO = subsetStreamIO; + subsetStreamIO.setMimeType(storageIO.getMimeType()); + storageIO = subsetStreamIO; } else { - dataFileIO = null; + storageIO = null; } } catch (IOException ioex) { - dataFileIO = null; + storageIO = null; } } } else { @@ -203,17 +203,17 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] } - if (dataFileIO == null) { + if (storageIO == null) { throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE); } } - InputStream instream = dataFileIO.getInputStream(); + InputStream instream = storageIO.getInputStream(); if (instream != null) { // headers: - String fileName = dataFileIO.getFileName(); - String mimeType = dataFileIO.getMimeType(); + String fileName = storageIO.getFileName(); + String mimeType = storageIO.getMimeType(); // Provide both the "Content-disposition" and "Content-Type" headers, // to satisfy the widest selection of browsers out there. 
@@ -223,8 +223,8 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] long contentSize; boolean useChunkedTransfer = false; - //if ((contentSize = getFileSize(di, dataFileIO.getVarHeader())) > 0) { - if ((contentSize = getContentSize(dataFileIO)) > 0) { + //if ((contentSize = getFileSize(di, storageIO.getVarHeader())) > 0) { + if ((contentSize = getContentSize(storageIO)) > 0) { logger.fine("Content size (retrieved from the AccessObject): "+contentSize); httpHeaders.add("Content-Length", contentSize); } else { @@ -243,13 +243,13 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // any extra content, such as the variable header for the // subsettable files: - if (dataFileIO.getVarHeader() != null) { - if (dataFileIO.getVarHeader().getBytes().length > 0) { + if (storageIO.getVarHeader() != null) { + if (storageIO.getVarHeader().getBytes().length > 0) { if (useChunkedTransfer) { - String chunkSizeLine = String.format("%x\r\n", dataFileIO.getVarHeader().getBytes().length); + String chunkSizeLine = String.format("%x\r\n", storageIO.getVarHeader().getBytes().length); outstream.write(chunkSizeLine.getBytes()); } - outstream.write(dataFileIO.getVarHeader().getBytes()); + outstream.write(storageIO.getVarHeader().getBytes()); if (useChunkedTransfer) { outstream.write(chunkClose); } @@ -321,7 +321,7 @@ private boolean isPreprocessedMetadataDownload(DownloadInstance downloadInstance return downloadInstance.getConversionParam().equals("format") && downloadInstance.getConversionParamValue().equals("prep"); } - private long getContentSize(DataFileIO accessObject) { + private long getContentSize(StorageIO accessObject) { long contentSize = 0; if (accessObject.getSize() > -1) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index 96817232fc7..10ae970cf24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -36,15 +36,15 @@ public DataAccess() { // or DEFFAULT_STORAGE_DRIVER_IDENTIFIER = s3 public static final String DEFAULT_STORAGE_DRIVER_IDENTIFIER = System.getProperty("dataverse.files.storage-driver-id"); - // The getDataFileIO() methods initialize DataFileIO objects for + // The getStorageIO() methods initialize StorageIO objects for // datafiles that are already saved using one of the supported Dataverse // DataAccess IO drivers. 
- public static DataFileIO getDataFileIO(T dvObject) throws IOException { - return getDataFileIO(dvObject, null); + public static StorageIO getStorageIO(T dvObject) throws IOException { + return getStorageIO(dvObject, null); } //passing DVObject instead of a datafile to accomodate for use of datafiles as well as datasets - public static DataFileIO getDataFileIO(T dvObject, DataAccessRequest req) throws IOException { + public static StorageIO getStorageIO(T dvObject, DataAccessRequest req) throws IOException { if (dvObject == null || dvObject.getStorageIdentifier() == null @@ -53,7 +53,7 @@ public static DataFileIO getDataFileIO(T dvObject, DataA } if (dvObject.getStorageIdentifier().startsWith("file://") - || (!dvObject.getStorageIdentifier().matches("^[a-z][a-z]*://.*"))) { + || (!dvObject.getStorageIdentifier().matches("^[a-z][a-z0-9]*://.*"))) { return new FileAccessIO<>(dvObject, req); } else if (dvObject.getStorageIdentifier().startsWith("swift://")){ return new SwiftAccessIO<>(dvObject, req); @@ -75,19 +75,19 @@ public static DataFileIO getDataFileIO(T dvObject, DataA // createDataAccessObject() methods create a *new*, empty DataAccess objects, // for saving new, not yet saved datafiles. - public static DataFileIO createNewDataFileIO(T dvObject, String storageTag) throws IOException { + public static StorageIO createNewStorageIO(T dvObject, String storageTag) throws IOException { - return createNewDataFileIO(dvObject, storageTag, DEFAULT_STORAGE_DRIVER_IDENTIFIER); + return createNewStorageIO(dvObject, storageTag, DEFAULT_STORAGE_DRIVER_IDENTIFIER); } - public static DataFileIO createNewDataFileIO(T dvObject, String storageTag, String driverIdentifier) throws IOException { + public static StorageIO createNewStorageIO(T dvObject, String storageTag, String driverIdentifier) throws IOException { if (dvObject == null || storageTag == null || storageTag.isEmpty()) { throw new IOException("getDataAccessObject: null or invalid datafile."); } - DataFileIO dataFileIO = null; + StorageIO storageIO = null; dvObject.setStorageIdentifier(storageTag); @@ -96,18 +96,17 @@ public static DataFileIO createNewDataFileIO(T dvObject, } if (driverIdentifier.equals("file")) { - dataFileIO = new FileAccessIO<>(dvObject, null); + storageIO = new FileAccessIO<>(dvObject, null); } else if (driverIdentifier.equals("swift")) { - dataFileIO = new SwiftAccessIO<>(dvObject, null); + storageIO = new SwiftAccessIO<>(dvObject, null); } else if (driverIdentifier.equals("s3")) { - dataFileIO = new S3AccessIO<>(dvObject, null); - + storageIO = new S3AccessIO<>(dvObject, null); } else { throw new IOException("createDataAccessObject: Unsupported storage method " + driverIdentifier); } - dataFileIO.open(DataAccessOption.WRITE_ACCESS); - return dataFileIO; + storageIO.open(DataAccessOption.WRITE_ACCESS); + return storageIO; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataConverter.java index 725e380dac4..f9009fdf280 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataConverter.java @@ -71,7 +71,7 @@ public DataConverter() { public static String SERVICE_REQUEST_CONVERT = "convert"; - public static DataFileIO performFormatConversion(DataFile file, DataFileIO dataFileIO, String formatRequested, String formatType) { + public static StorageIO performFormatConversion(DataFile file, StorageIO storageIO, String formatRequested, String formatType) { if 
(!file.isTabularData()) { return null; } @@ -80,7 +80,7 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi // we don't need to do anything: if (formatRequested.equals(FILE_TYPE_TAB) && file.getContentType().equals("text/tab-separated-values")) { - return dataFileIO; + return storageIO; } InputStream convertedFileStream = null; @@ -89,8 +89,8 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi // We may already have a cached copy of this // format: try { - convertedFileStream = Channels.newInputStream((ReadableByteChannel) dataFileIO.openAuxChannel(formatRequested)); - convertedFileSize = dataFileIO.getAuxObjectSize(formatRequested); + convertedFileStream = Channels.newInputStream((ReadableByteChannel) storageIO.openAuxChannel(formatRequested)); + convertedFileSize = storageIO.getAuxObjectSize(formatRequested); } catch (IOException ioex) { logger.fine("No cached copy for file format "+formatRequested+", file "+file.getStorageIdentifier()); convertedFileStream = null; @@ -104,7 +104,7 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi boolean tempFilesRequired = false; try { - Path tabFilePath = dataFileIO.getFileSystemPath(); + Path tabFilePath = storageIO.getFileSystemPath(); tabFile = tabFilePath.toFile(); } catch (UnsupportedDataAccessOperationException uoex) { // this means there is no direct filesystem path for this object; it's ok! @@ -119,15 +119,15 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi ReadableByteChannel tabFileChannel = null; try { logger.fine("opening datafFileIO for the source tabular file..."); - dataFileIO.open(); - tabFileChannel = dataFileIO.getReadChannel(); + storageIO.open(); + tabFileChannel = storageIO.getReadChannel(); FileChannel tempFileChannel; tabFile = File.createTempFile("tempTabFile", ".tmp"); tempFileChannel = new FileOutputStream(tabFile).getChannel(); - tempFileChannel.transferFrom(tabFileChannel, 0, dataFileIO.getSize()); + tempFileChannel.transferFrom(tabFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { - logger.warning("caught IOException trying to store tabular file " + dataFileIO.getDataFile().getStorageIdentifier() + " as a temp file."); + logger.warning("caught IOException trying to store tabular file " + storageIO.getDataFile().getStorageIdentifier() + " as a temp file."); return null; } @@ -144,7 +144,7 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi if (formatConvertedFile != null && formatConvertedFile.exists()) { try { - dataFileIO.savePathAsAux(Paths.get(formatConvertedFile.getAbsolutePath()), formatRequested); + storageIO.savePathAsAux(Paths.get(formatConvertedFile.getAbsolutePath()), formatRequested); } catch (IOException ex) { logger.warning("failed to save cached format " + formatRequested + " for " + file.getStorageIdentifier()); @@ -177,7 +177,7 @@ public static DataFileIO performFormatConversion(DataFile file, DataFi inputStreamIO.setMimeType(formatType); - String fileName = dataFileIO.getFileName(); + String fileName = storageIO.getFileName(); if (fileName == null || fileName.isEmpty()) { fileName = "f" + file.getId().toString(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileZipper.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileZipper.java index a57251d7d4a..49f8b68b3c1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileZipper.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileZipper.java @@ -113,7 +113,7 @@ 
public void zipFiles(List files, OutputStream outstream, String fileMa DataFile file = (DataFile) iter.next(); DataAccessRequest daReq = new DataAccessRequest(); - DataFileIO accessObject = DataAccess.createDataAccessObject(file, daReq); + StorageIO accessObject = DataAccess.createDataAccessObject(file, daReq); if (accessObject != null) { accessObject.open(); @@ -221,7 +221,7 @@ public long addFileToZipStream(DataFile dataFile) throws IOException { boolean createManifest = fileManifest != null; DataAccessRequest daReq = new DataAccessRequest(); - DataFileIO accessObject = DataAccess.getDataFileIO(dataFile, daReq); + StorageIO accessObject = DataAccess.getStorageIO(dataFile, daReq); if (accessObject != null) { accessObject.open(); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index bf38d827dbe..5f793c00381 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -47,7 +47,7 @@ import java.util.ArrayList; -public class FileAccessIO extends DataFileIO { +public class FileAccessIO extends StorageIO { public FileAccessIO () { this(null); @@ -138,14 +138,14 @@ public void open (DataAccessOption... options) throws IOException { //TODO: do we really need to do anything here? should we return the dataset directory? dataset = this.getDataset(); if (isReadAccess) { - - FileInputStream fin = openLocalFileAsInputStream(); - Path path= dataset.getFileSystemDirectory(); - if (path == null) { - throw new IOException("Failed to locate Dataset"+dataset.getIdentifier()); - } - - this.setInputStream(fin); + //TODO: Not necessary for dataset as there is no files associated with this + // FileInputStream fin = openLocalFileAsInputStream(); +// Path path= dataset.getFileSystemDirectory(); +// if (path == null) { +// throw new IOException("Failed to locate Dataset"+dataset.getIdentifier()); +// } +// +// this.setInputStream(fin); } else if (isWriteAccess) { //this checks whether a directory for a dataset exists if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { @@ -487,16 +487,14 @@ private String getDatasetDirectory() throws IOException { throw new IOException("No DvObject defined in the Data Access Object"); } - //TODO: Is this Important? 
-// if (dvObject.getOwner() == null) { -// throw new IOException("Data Access: no parent defined this Object"); -// } Path datasetDirectoryPath=null; if (dvObject instanceof Dataset) { datasetDirectoryPath = this.getDataset().getFileSystemDirectory(); } else if (dvObject instanceof DataFile) { datasetDirectoryPath = this.getDataFile().getOwner().getFileSystemDirectory(); + } else if (dvObject instanceof Dataverse) { + throw new IOException("FileAccessIO: Dataverses are not a supported dvObject"); } if (datasetDirectoryPath == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 7685e270adc..eeb098770e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -76,21 +76,21 @@ public static boolean isThumbnailAvailable(DataFile file, int size) { try { - DataFileIO dataFileIO = file.getDataFileIO(); - return isThumbnailAvailable(dataFileIO, size); + StorageIO storageIO = file.getStorageIO(); + return isThumbnailAvailable(storageIO, size); } catch (IOException ioEx) { return false; } } - private static boolean isThumbnailAvailable(DataFileIO dataFileIO, int size) { + private static boolean isThumbnailAvailable(StorageIO storageIO, int size) { - if (dataFileIO == null || dataFileIO.getDvObject()== null) { + if (storageIO == null || storageIO.getDvObject()== null) { return false; } - DataFile file = dataFileIO.getDataFile(); + DataFile file = storageIO.getDataFile(); // if thumbnails are not even supported on this file type, no need // to check anything else: @@ -105,18 +105,18 @@ private static boolean isThumbnailAvailable(DataFileIO dataFileIO, int return false; } - if (isThumbnailCached(dataFileIO, size)) { + if (isThumbnailCached(storageIO, size)) { return true; } logger.fine("Checking for thumbnail, file type: " + file.getContentType()); if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - return generateImageThumbnail(dataFileIO, size); + return generateImageThumbnail(storageIO, size); } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - return generatePDFThumbnail(dataFileIO, size); + return generatePDFThumbnail(storageIO, size); } else if (file.getContentType().equalsIgnoreCase("application/zipped-shapefile") || (file.isTabularData() && file.hasGeospatialTag())) { - return generateWorldMapThumbnail(dataFileIO, size); + return generateWorldMapThumbnail(storageIO, size); } return false; @@ -133,10 +133,10 @@ private static boolean isThumbnailAvailable(DataFileIO dataFileIO, int // Note that this method is mainly used by the data access API methods. // Whenever a page needs a thumbnail, we prefer to rely on the Base64 // string version. 
- public static InputStreamIO getImageThumbnailAsInputStream(DataFileIO dataFileIO, int size) { + public static InputStreamIO getImageThumbnailAsInputStream(StorageIO storageIO, int size) { logger.fine("entering getImageThumb, size " + size); - if (!isThumbnailAvailable(dataFileIO, size)) { + if (!isThumbnailAvailable(storageIO, size)) { return null; } @@ -145,20 +145,20 @@ public static InputStreamIO getImageThumbnailAsInputStream(DataFileIO InputStream cachedThumbnailInputStream = null; try { - dataFileIO.open(); - Channel cachedThumbnailChannel = dataFileIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + storageIO.open(); + Channel cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); if (cachedThumbnailChannel == null) { logger.warning("Null channel for aux object " + THUMBNAIL_SUFFIX + size); return null; } cachedThumbnailInputStream = Channels.newInputStream((ReadableByteChannel) cachedThumbnailChannel); - int cachedThumbnailSize = (int) dataFileIO.getAuxObjectSize(THUMBNAIL_SUFFIX + size); + int cachedThumbnailSize = (int) storageIO.getAuxObjectSize(THUMBNAIL_SUFFIX + size); InputStreamIO inputStreamIO = new InputStreamIO(cachedThumbnailInputStream, cachedThumbnailSize); inputStreamIO.setMimeType(THUMBNAIL_MIME_TYPE); - String fileName = dataFileIO.getFileName(); + String fileName = storageIO.getFileName(); if (fileName != null) { fileName = fileName.replaceAll("\\.[^\\.]*$", ".png"); inputStreamIO.setFileName(fileName); @@ -175,9 +175,9 @@ public static InputStreamIO getImageThumbnailAsInputStream(DataFileIO } } - private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int size) { - if (isPdfFileOverSizeLimit(dataFileIO.getDataFile().getFilesize())) { - logger.fine("Image file too large (" + dataFileIO.getDataFile().getFilesize() + " bytes) - skipping"); + private static boolean generatePDFThumbnail(StorageIO storageIO, int size) { + if (isPdfFileOverSizeLimit(storageIO.getDataFile().getFilesize())) { + logger.fine("Image file too large (" + storageIO.getDataFile().getFilesize() + " bytes) - skipping"); return false; } @@ -200,7 +200,7 @@ private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int boolean tempFilesRequired = false; try { - Path pdfFilePath = dataFileIO.getFileSystemPath(); + Path pdfFilePath = storageIO.getFileSystemPath(); sourcePdfFile = pdfFilePath.toFile(); logger.fine("Opened the source pdf file as a local File."); } catch (UnsupportedDataAccessOperationException uoex) { @@ -217,11 +217,11 @@ private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int ReadableByteChannel pdfFileChannel; try { - dataFileIO.open(); - //inputStream = dataFileIO.getInputStream(); - pdfFileChannel = dataFileIO.getReadChannel(); + storageIO.open(); + //inputStream = storageIO.getInputStream(); + pdfFileChannel = storageIO.getReadChannel(); } catch (IOException ioex) { - logger.warning("caught IOException trying to open an input stream for " + dataFileIO.getDataFile().getStorageIdentifier()); + logger.warning("caught IOException trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier()); return false; } @@ -231,7 +231,7 @@ private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int tempFile = File.createTempFile("tempFileToRescale", ".tmp"); tempFileChannel = new FileOutputStream(tempFile).getChannel(); - tempFileChannel.transferFrom(pdfFileChannel, 0, dataFileIO.getSize()); + tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { 
logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file."); return false; @@ -254,7 +254,7 @@ private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int if (tempFilesRequired) { try { logger.fine("attempting to save generated pdf thumbnail, as AUX file "+THUMBNAIL_SUFFIX + size); - dataFileIO.savePathAsAux(Paths.get(imageThumbFileName), THUMBNAIL_SUFFIX + size); + storageIO.savePathAsAux(Paths.get(imageThumbFileName), THUMBNAIL_SUFFIX + size); } catch (IOException ioex) { logger.warning("failed to save generated pdf thumbnail, as AUX file "+THUMBNAIL_SUFFIX + size+"!"); return false; @@ -264,24 +264,21 @@ private static boolean generatePDFThumbnail(DataFileIO dataFileIO, int return true; } - private static boolean generateImageThumbnail(DataFileIO dataFileIO, int size) { + private static boolean generateImageThumbnail(StorageIO storageIO, int size) { - if (isImageOverSizeLimit(dataFileIO.getDataFile().getFilesize())) { + if (isImageOverSizeLimit(storageIO.getDataFile().getFilesize())) { logger.fine("Image file too large - skipping"); return false; } - - InputStream inputStream; try { - dataFileIO.open(); - inputStream = dataFileIO.getInputStream(); + storageIO.open(); } catch (IOException ioex) { - logger.warning("caught IOException trying to open an input stream for " + dataFileIO.getDataFile().getStorageIdentifier()); + logger.warning("caught IOException trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier() + ioex); return false; } - return generateImageThumbnailFromInputStream(dataFileIO, size, inputStream); + return generateImageThumbnailFromInputStream(storageIO, size, storageIO.getInputStream()); } /* @@ -295,21 +292,21 @@ private static boolean generateImageThumbnail(DataFileIO dataFileIO, i * Also note that it works the exact same way for tabular-mapped-as-worldmap * files as well. 
*/ - private static boolean generateWorldMapThumbnail(DataFileIO dataFileIO, int size) { + private static boolean generateWorldMapThumbnail(StorageIO storageIO, int size) { InputStream worldMapImageInputStream = null; try { - dataFileIO.open(); + storageIO.open(); - Channel worldMapImageChannel = dataFileIO.openAuxChannel(WORLDMAP_IMAGE_SUFFIX); + Channel worldMapImageChannel = storageIO.openAuxChannel(WORLDMAP_IMAGE_SUFFIX); if (worldMapImageChannel == null) { logger.warning("Could not open channel for aux ."+ WORLDMAP_IMAGE_SUFFIX + " object; (" + size + ")"); return false; } worldMapImageInputStream = Channels.newInputStream((ReadableByteChannel) worldMapImageChannel); - long worldMapImageSize = dataFileIO.getAuxObjectSize(WORLDMAP_IMAGE_SUFFIX); + long worldMapImageSize = storageIO.getAuxObjectSize(WORLDMAP_IMAGE_SUFFIX); if (isImageOverSizeLimit(worldMapImageSize)) { logger.fine("WorldMap image too large - skipping"); @@ -321,23 +318,23 @@ private static boolean generateWorldMapThumbnail(DataFileIO dataFileIO return false; } catch (IOException ioex) { - logger.warning("caught IOException trying to open an input stream for worldmap .img file (" + dataFileIO.getDataFile().getStorageIdentifier() + ")"); + logger.warning("caught IOException trying to open an input stream for worldmap .img file (" + storageIO.getDataFile().getStorageIdentifier() + ")"); return false; } - return generateImageThumbnailFromInputStream(dataFileIO, size, worldMapImageInputStream); + return generateImageThumbnailFromInputStream(storageIO, size, worldMapImageInputStream); } /* * This is the actual workhorse method that does the rescaling of the full * size image: */ - private static boolean generateImageThumbnailFromInputStream(DataFileIO dataFileIO, int size, InputStream inputStream) { + private static boolean generateImageThumbnailFromInputStream(StorageIO storageIO, int size, InputStream inputStream) { BufferedImage fullSizeImage; try { - logger.fine("attempting to read the image file with ImageIO.read(InputStream), " + dataFileIO.getDataFile().getStorageIdentifier()); + logger.fine("attempting to read the image file with ImageIO.read(InputStream), " + storageIO.getDataFile().getStorageIdentifier()); fullSizeImage = ImageIO.read(inputStream); } catch (IOException ioex) { logger.warning("Caught exception attempting to read the image file with ImageIO.read(InputStream)"); @@ -352,7 +349,7 @@ private static boolean generateImageThumbnailFromInputStream(DataFileIO dataFileIO, int size) { + private static boolean isThumbnailCached(StorageIO storageIO, int size) { boolean cached; try { - cached = dataFileIO.isAuxObjectCached(THUMBNAIL_SUFFIX + size); + cached = storageIO.isAuxObjectCached(THUMBNAIL_SUFFIX + size); } catch (IOException ioex) { - logger.fine("caught IO exception while checking for a cached thumbnail (file "+dataFileIO.getDataFile().getStorageIdentifier()+")"); + logger.fine("caught IO exception while checking for a cached thumbnail (file "+storageIO.getDataFile().getStorageIdentifier()+")"); return false; } if (cached) { - logger.fine("thumbnail is cached for "+dataFileIO.getDataFile().getStorageIdentifier()); + logger.fine("thumbnail is cached for "+storageIO.getDataFile().getStorageIdentifier()); } else { - logger.fine("no thumbnail cached for "+dataFileIO.getDataFile().getStorageIdentifier()); + logger.fine("no thumbnail cached for "+storageIO.getDataFile().getStorageIdentifier()); } @@ -439,22 +436,22 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { 
logger.fine("entering getImageThumbnailAsBase64, size " + size+", for "+file.getStorageIdentifier()); try { - DataFileIO dataFileIO = file.getDataFileIO(); + StorageIO storageIO = file.getStorageIO(); - if (!isThumbnailAvailable(dataFileIO, size)) { + if (!isThumbnailAvailable(storageIO, size)) { logger.info("no thumbnail available for "+file.getStorageIdentifier()); return null; } - //dataFileIO.open(); - Channel cachedThumbnailChannel = dataFileIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + //storageIO.open(); + Channel cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); if (cachedThumbnailChannel == null) { logger.warning("Null channel for aux object "+ THUMBNAIL_SUFFIX + size); return null; } InputStream cachedThumbnailInputStream = Channels.newInputStream((ReadableByteChannel) cachedThumbnailChannel); - int cachedThumbnailSize = (int)dataFileIO.getAuxObjectSize(THUMBNAIL_SUFFIX + size); + int cachedThumbnailSize = (int)storageIO.getAuxObjectSize(THUMBNAIL_SUFFIX + size); return getImageAsBase64FromInputStream(cachedThumbnailInputStream, cachedThumbnailSize); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index 3db4e650e4b..80ef0325a50 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -20,7 +20,7 @@ * * @author Leonid Andreev */ -public class InputStreamIO extends DataFileIO { +public class InputStreamIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.InputStreamIO"); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 54b528214ca..a2b02c2a5c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -42,7 +42,7 @@ /* Experimental Amazon AWS S3 driver */ -public class S3AccessIO extends DataFileIO { +public class S3AccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.S3AccessIO"); @@ -145,7 +145,7 @@ public void open(DataAccessOption... 
options) throws IOException { } - // DataFileIO method for copying a local Path (for ex., a temp file), into this DataAccess location: + // StorageIO method for copying a local Path (for ex., a temp file), into this DataAccess location: //FIXME: Incomplete @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java similarity index 98% rename from src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileIO.java rename to src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 747fad184af..9f22ae742e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataFileIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -48,17 +48,17 @@ * @param what it writes */ -public abstract class DataFileIO { +public abstract class StorageIO { - public DataFileIO() { + public StorageIO() { } - public DataFileIO(T dvObject) { + public StorageIO(T dvObject) { this(dvObject, null); } - public DataFileIO(T dvObject, DataAccessRequest req) { + public StorageIO(T dvObject, DataAccessRequest req) { this.dvObject = dvObject; this.req = req; if (this.req == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java index bb7f7e71a9d..f5ea17de568 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java @@ -39,10 +39,10 @@ public StoredOriginalFile () { private static final String SAVED_ORIGINAL_FILENAME_EXTENSION = "orig"; - public static DataFileIO retreive(DataFileIO dataFileIO) { + public static StorageIO retreive(StorageIO storageIO) { String originalMimeType; - DataFile dataFile = dataFileIO.getDataFile(); + DataFile dataFile = storageIO.getDataFile(); if (dataFile == null) { return null; @@ -58,9 +58,9 @@ public static DataFileIO retreive(DataFileIO dataFileIO) { InputStreamIO inputStreamIO; try { - dataFileIO.open(); - Channel storedOriginalChannel = dataFileIO.openAuxChannel(SAVED_ORIGINAL_FILENAME_EXTENSION); - storedOriginalSize = dataFileIO.getAuxObjectSize(SAVED_ORIGINAL_FILENAME_EXTENSION); + storageIO.open(); + Channel storedOriginalChannel = storageIO.openAuxChannel(SAVED_ORIGINAL_FILENAME_EXTENSION); + storedOriginalSize = storageIO.getAuxObjectSize(SAVED_ORIGINAL_FILENAME_EXTENSION); inputStreamIO = new InputStreamIO(Channels.newInputStream((ReadableByteChannel) storedOriginalChannel), storedOriginalSize); logger.fine("Opened stored original file as Aux "+SAVED_ORIGINAL_FILENAME_EXTENSION); } catch (IOException ioEx) { @@ -79,7 +79,7 @@ public static DataFileIO retreive(DataFileIO dataFileIO) { inputStreamIO.setMimeType("application/x-unknown"); } - String fileName = dataFileIO.getFileName(); + String fileName = storageIO.getFileName(); if (fileName != null) { if (originalMimeType != null) { String origFileExtension = generateOriginalExtension(originalMimeType); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index 12658a0fcc5..1ea94a1ec6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -36,7 +36,7 @@ Experimental Swift driver, implemented as part of the Dataverse - Mass Open Cloud collaboration. 
*/ -public class SwiftAccessIO extends DataFileIO { +public class SwiftAccessIO extends StorageIO { private String swiftFolderPath; @@ -122,7 +122,6 @@ public void open(DataAccessOption... options) throws IOException { } else if (dvObject instanceof Dataset) { //we are uploading a dataset related auxiliary file //such as a dataset thumbnail or a metadata export - Dataset dataset = this.getDataset(); if (isReadAccess) { //TODO: fix this InputStream fin = openSwiftFileAsInputStream(); @@ -136,7 +135,6 @@ public void open(DataAccessOption... options) throws IOException { swiftFileObject = initializeSwiftFileObject(true); } } else if (dvObject instanceof Dataverse) { - Dataverse dataverse = this.getDataverse(); } else { throw new IOException("Data Access: Invalid DvObject type"); } @@ -144,7 +142,7 @@ } - // DataFileIO method for copying a local Path (for ex., a temp file), into this DataAccess location: + // StorageIO method for copying a local Path (for ex., a temp file), into this DataAccess location: @Override public void savePath(Path fileSystemPath) throws IOException { @@ -185,7 +183,6 @@ public void saveInputStream(InputStream inputStream) throws IOException { } try { - swiftFileObject.uploadObject(inputStream); } catch (Exception ioex) { @@ -259,7 +256,8 @@ public long getAuxObjectSize(String auxItemTag) throws IOException { return swiftAuxObject.getContentLength(); } - @Override public Path getAuxObjectAsPath(String auxItemTag) throws IOException { + @Override + public Path getAuxObjectAsPath(String auxItemTag) throws IOException { throw new UnsupportedDataAccessOperationException("SwiftAccessIO: this is a remote DataAccess IO object, its Aux objects have no local filesystem Paths associated with it."); } @@ -333,7 +331,7 @@ public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) thr } @Override - public ListlistAuxObjects() throws IOException { + public List listAuxObjects() throws IOException { if (this.swiftContainer == null || this.swiftFileObject == null) { throw new IOException("This SwiftAccessIO() hasn't been properly initialized yet."); } @@ -485,71 +483,6 @@ private StoredObject initializeSwiftFileObject(boolean writeAccess, String auxIt } else { throw new IOException("SwiftAccessIO: unknown access mode."); } - // Authenticate with Swift: - - // should we only authenticate when account == null? - - if (this.account == null) { - account = authenticateWithSwift(swiftEndPoint); - } - - /* - The containers created is swiftEndPoint concatenated with the swiftContainerName - property. Creating container with certain names throws 'Unable to create - container' error on Openstack. - Any datafile with http://rdgw storage identifier i.e present on Object - store service endpoint already only needs to look-up for container using - just swiftContainerName which is the concatenated name. - In future, a container for the endpoint can be created and for every - other swiftContainerName Object Store pseudo-folder can be created, which is - not provide by the joss Java swift library as of yet. - */ - - if (storageIdentifier.startsWith("swift://")) { - // An existing swift object; the container must already exist as well.
- this.swiftContainer = account.getContainer(swiftFolderPath); //changed from swiftendpoint - } - - if (!this.swiftContainer.exists()) { - if (writeAccess) { - // dataContainer.create(); - try { - //creates a public data container - this.swiftContainer.makePublic(); - } - catch (Exception e){ - //e.printStackTrace(); - logger.warning("Caught exception "+e.getClass()+" while creating a swift container (it's likely not fatal!)"); - } - } else { - // This is a fatal condition - it has to exist, if we were to - // read an existing object! - throw new IOException("SwiftAccessIO: container " + swiftContainerName + " does not exist."); - } - } - - fileObject = this.swiftContainer.getObject(swiftFileName); - - - // If this is the main, primary datafile object (i.e., not an auxiliary - // object for a primary file), we also set the file download url here: - if (auxItemTag == null) { - setRemoteUrl(getSwiftFileURI(fileObject)); - logger.fine(getRemoteUrl() + " success; write mode: "+writeAccess); - } else { - logger.fine("sucessfully opened AUX object "+auxItemTag+" , write mode: "+writeAccess); - } - - if (!writeAccess && !fileObject.exists()) { - throw new FileNotFoundException("SwiftAccessIO: File object " + swiftFileName + " does not exist (Dataverse datafile id: " + dvObject.getId()); - } - - auxFiles = null; - - return fileObject; } else if (dvObject instanceof Dataset) { Dataset dataset = this.getDataset(); @@ -597,82 +530,86 @@ private StoredObject initializeSwiftFileObject(boolean writeAccess, String auxIt } else { throw new IOException("SwiftAccessIO: unknown access mode."); } - // Authenticate with Swift: + } else { + //for future scope, if dataverse is decided to be stored in swift storage containers + throw new FileNotFoundException("Error initializing swift object"); + } + // Authenticate with Swift: - // should we only authenticate when account == null? + // should we only authenticate when account == null? - if (this.account == null) { - account = authenticateWithSwift(swiftEndPoint); - } + if (this.account == null) { + account = authenticateWithSwift(swiftEndPoint); + } - /* - The containers created is swiftEndPoint concatenated with the swiftContainerName - property. Creating container with certain names throws 'Unable to create - container' error on Openstack. - Any datafile with http://rdgw storage identifier i.e present on Object - store service endpoint already only needs to look-up for container using - just swiftContainerName which is the concatenated name. - In future, a container for the endpoint can be created and for every - other swiftContainerName Object Store pseudo-folder can be created, which is - not provide by the joss Java swift library as of yet. - */ + /* + The container created is the swiftEndPoint concatenated with the swiftContainerName + property. Creating a container with certain names throws an 'Unable to create + container' error on Openstack. + Any datafile with an http://rdgw storage identifier, i.e. one already present on the + Object Store service endpoint, only needs to look up its container using + just swiftContainerName, which is the concatenated name. + In the future, a container can be created for the endpoint, and an Object Store + pseudo-folder can be created for every other swiftContainerName; this is + not provided by the joss Java swift library as of yet. + */ - if (storageIdentifier.startsWith("swift://")) { - // An existing swift object; the container must already exist as well.
- this.swiftContainer = account.getContainer(swiftContainerName); - } else { - // This is a new object being created. - this.swiftContainer = account.getContainer(swiftFolderPath); //changed from swiftendpoint - } + if (storageIdentifier.startsWith("swift://")) { + // An existing swift object; the container must already exist as well. + this.swiftContainer = account.getContainer(swiftContainerName); + } else { + // This is a new object being created. + this.swiftContainer = account.getContainer(swiftFolderPath); //changed from swiftendpoint + } - if (!this.swiftContainer.exists()) { - if (writeAccess) { - // dataContainer.create(); - try { - //creates a public data container - this.swiftContainer.makePublic(); - } - catch (Exception e){ - //e.printStackTrace(); - logger.warning("Caught exception "+e.getClass()+" while creating a swift container (it's likely not fatal!)"); - } - } else { - // This is a fatal condition - it has to exist, if we were to - // read an existing object! - throw new IOException("SwiftAccessIO: container " + swiftContainerName + " does not exist."); - } + if (!this.swiftContainer.exists()) { + if (writeAccess) { + // dataContainer.create(); + try { + //creates a public data container + this.swiftContainer.makePublic(); + } + catch (Exception e){ + //e.printStackTrace(); + logger.warning("Caught exception "+e.getClass()+" while creating a swift container (it's likely not fatal!)"); + } + } else { + // This is a fatal condition - it has to exist, if we were to + // read an existing object! + throw new IOException("SwiftAccessIO: container " + swiftContainerName + " does not exist."); } + } - fileObject = this.swiftContainer.getObject(swiftFileName); + fileObject = this.swiftContainer.getObject(swiftFileName); + // If this is the main, primary datafile object (i.e., not an auxiliary + // object for a primary file), we also set the file download url here: + if (auxItemTag == null && dvObject instanceof DataFile) { + setRemoteUrl(getSwiftFileURI(fileObject)); + logger.fine(getRemoteUrl() + " success; write mode: "+writeAccess); + } else { logger.fine("successfully opened AUX object "+auxItemTag+" , write mode: "+writeAccess); + } - if (!writeAccess && !fileObject.exists()) { - throw new FileNotFoundException("SwiftAccessIO: File object " + swiftFileName + " does not exist (Dataverse dataset id: " + dataset.getId()); - } + if (!writeAccess && !fileObject.exists()) { + throw new FileNotFoundException("SwiftAccessIO: DvObject " + swiftFileName + " does not exist (Dataverse dvObject id: " + dvObject.getId()); + } - auxFiles = null; + auxFiles = null; - return fileObject; - //for future scope, if dataverse is decided to be stored in swift storage containersopen - } else { - throw new FileNotFoundException("Error initializing swift object"); - } - + return fileObject; + } private InputStream openSwiftFileAsInputStream() throws IOException { swiftFileObject = initializeSwiftFileObject(false); - - InputStream in = swiftFileObject.downloadObjectAsInputStream(); this.setSize(swiftFileObject.getContentLength()); - return in; + return swiftFileObject.downloadObjectAsInputStream(); } private InputStream openSwiftAuxFileAsInputStream(String auxItemTag) throws IOException { - StoredObject swiftAuxFileObject = initializeSwiftFileObject(false, auxItemTag); - return swiftAuxFileObject.downloadObjectAsInputStream(); + return initializeSwiftFileObject(false, auxItemTag).downloadObjectAsInputStream(); } private StoredObject openSwiftAuxFile(String auxItemTag) throws IOException { @@ -680,9
+617,7 @@ private StoredObject openSwiftAuxFile(String auxItemTag) throws IOException { } private StoredObject openSwiftAuxFile(boolean writeAccess, String auxItemTag) throws IOException { - StoredObject swiftAuxFileObject = initializeSwiftFileObject(writeAccess, auxItemTag); - - return swiftAuxFileObject; + return initializeSwiftFileObject(writeAccess, auxItemTag); } @@ -774,19 +709,17 @@ private boolean isWriteAccessRequested(DataAccessOption... options) throws IOExc } private String getSwiftFileURI(StoredObject fileObject) throws IOException { - String fileUri; try { - fileUri = fileObject.getPublicURL(); + return fileObject.getPublicURL(); } catch (Exception ex) { throw new IOException("SwiftAccessIO: failed to get public URL of the stored object"); } - return fileUri; } @Override public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { if (this.isAuxObjectCached(auxItemTag)) { - return openSwiftAuxFile(auxItemTag).downloadObjectAsInputStream(); + return openSwiftAuxFileAsInputStream(auxItemTag); } else { throw new IOException("SwiftAccessIO: Failed to get aux file as input stream"); } @@ -799,11 +732,10 @@ public String getSwiftContainerName() { swiftFolderPathSeparator = "_"; } String authorityNoSlashes = this.getDataFile().getOwner().getAuthority().replace(this.getDataFile().getOwner().getDoiSeparator(), swiftFolderPathSeparator); - String containerName = this.getDataFile().getOwner().getProtocol() + swiftFolderPathSeparator + - authorityNoSlashes.replace(".", swiftFolderPathSeparator) + + return this.getDataFile().getOwner().getProtocol() + swiftFolderPathSeparator + + authorityNoSlashes.replace(".", swiftFolderPathSeparator) + swiftFolderPathSeparator + this.getDataFile().getOwner().getIdentifier(); - return containerName; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java index b11b3454b7b..c97dc747505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java @@ -89,7 +89,7 @@ public TabularSubsetGenerator (DataFile datafile, List variables) - DataFileIO dataAccess = datafile.getDataFileIO(); + StorageIO dataAccess = datafile.getStorageIO(); if (!dataAccess.isLocalFile()) { throw new IOException("Subsetting is supported on local files only!"); } @@ -721,7 +721,7 @@ public Object[] subsetObjectVector(DataFile dataFile, int column, int columntype throw new IOException("Column "+column+" is out of bounds."); } - DataFileIO dataAccess = dataFile.getDataFileIO(); + StorageIO dataAccess = dataFile.getStorageIO(); if (!dataAccess.isLocalFile()) { throw new IOException("Subsetting is supported on local files only!"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 36ad9dc9df3..35182365d13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -5,8 +5,8 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getDataFileIO; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; +import 
edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.util.FileUtil; import java.awt.image.BufferedImage; @@ -43,12 +43,32 @@ public static List getThumbnailCandidates(Dataset dataset, boo return thumbnails; } if (considerDatasetLogoAsCandidate) { - Path path = Paths.get(dataset.getFileSystemDirectory() + File.separator + datasetLogoThumbnail + thumb48addedByImageThumbConverter); - if (Files.exists(path)) { +// Path path = Paths.get(dataset.getFileSystemDirectory() + File.separator + datasetLogoThumbnail + thumb48addedByImageThumbConverter); +// if (Files.exists(path)) { +// logger.fine("Thumbnail created from dataset logo exists!"); +// File file = path.toFile(); +// try { +// byte[] bytes = Files.readAllBytes(file.toPath()); + StorageIO dataAccess = null; + + try{ + dataAccess = DataAccess.getStorageIO(dataset); + } + catch(IOException ioex){ + } + + InputStream in = null; + try { + if (dataAccess.getAuxFileAsInputStream(datasetLogoThumbnail + thumb48addedByImageThumbConverter) != null) { + in = dataAccess.getAuxFileAsInputStream(datasetLogoThumbnail + thumb48addedByImageThumbConverter); + } + } catch (Exception ioex) { + } + + if (in != null) { logger.fine("Thumbnail created from dataset logo exists!"); - File file = path.toFile(); try { - byte[] bytes = Files.readAllBytes(file.toPath()); + byte[] bytes = IOUtils.toByteArray(in); String base64image = Base64.getEncoder().encodeToString(bytes); DatasetThumbnail datasetThumbnail = new DatasetThumbnail(FileUtil.DATA_URI_SCHEME + base64image, null); thumbnails.add(datasetThumbnail); @@ -90,10 +110,10 @@ public static DatasetThumbnail getThumbnail(Dataset dataset, DatasetVersion data return null; } - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try{ - dataAccess = DataAccess.getDataFileIO(dataset); + dataAccess = DataAccess.getStorageIO(dataset); } catch(IOException ioex){ @@ -167,15 +187,15 @@ public static boolean deleteDatasetLogo(Dataset dataset) { return false; } try { - DataFileIO dataFileIO = getDataFileIO(dataset); + StorageIO storageIO = getStorageIO(dataset); - if (dataFileIO == null) { - logger.warning("Null DataFileIO in deleteDatasetLogo()"); + if (storageIO == null) { + logger.warning("Null storageIO in deleteDatasetLogo()"); return false; } - dataFileIO.deleteAuxObject(datasetLogoFilenameFinal); - dataFileIO.deleteAuxObject(datasetLogoThumbnail + thumb48addedByImageThumbConverter); + storageIO.deleteAuxObject(datasetLogoFilenameFinal); + storageIO.deleteAuxObject(datasetLogoThumbnail + thumb48addedByImageThumbConverter); } catch (IOException ex) { logger.info("Failed to delete dataset logo: " + ex.getMessage()); @@ -239,10 +259,10 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnail(Dataset data logger.severe(ex.getMessage()); } - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try{ - dataAccess = DataAccess.createNewDataFileIO(dataset,"file"); + dataAccess = DataAccess.createNewStorageIO(dataset,"file"); } catch(IOException ioex){ //TODO: Add a suitable warning message @@ -340,13 +360,21 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset) { * The dataset logo is the file that a user uploads which is *not* one of * the data files. Compare to the dataverse logo. We do not save the original * file that is uploaded. Rather, we delete it after first creating at least - * one thumbnail from it. + * one thumbnail from it. Update after #3919: We now keep the original one as well.
*/ public static boolean isDatasetLogoPresent(Dataset dataset) { if (dataset == null) { return false; } - return Files.exists(Paths.get(dataset.getFileSystemDirectory() + File.separator + datasetLogoFilenameFinal)); + + StorageIO dataAccess = null; + + try { + dataAccess = DataAccess.getStorageIO(dataset); + return dataAccess.isAuxObjectCached(datasetLogoThumbnail + thumb48addedByImageThumbConverter); + } catch (IOException ioex) { + } + return false; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java index e87e7bb0bae..ef5c7b4b676 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java @@ -4,7 +4,7 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -60,7 +60,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { // We need to delete a bunch of physical files, either from the file system, // or from some other storage medium where the datafile is stored, - // via its DataFileIO driver. + // via its StorageIO driver. // First we delete the derivative files, then try to delete the data // file itself; if that // fails, we throw an exception and abort the command without @@ -72,15 +72,15 @@ if (!doomed.isHarvested() && !StringUtil.isEmpty(doomed.getStorageIdentifier())) { logger.log(Level.FINE, "Storage identifier for the file: {0}", doomed.getStorageIdentifier()); - DataFileIO dataFileIO = null; + StorageIO storageIO = null; try { - dataFileIO = doomed.getDataFileIO(); + storageIO = doomed.getStorageIO(); } catch (IOException ioex) { throw new CommandExecutionException("Failed to initialize physical access driver.", ioex, this); } - if (dataFileIO != null) { + if (storageIO != null) { // First, delete all the derivative files: // We may have a few extra files associated with this object - @@ -91,19 +91,19 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { // auxiliary files, we'll just leave an error message in the // log file and proceed deleting the database object. try { - dataFileIO.open(); - dataFileIO.deleteAllAuxObjects(); + storageIO.open(); + storageIO.deleteAllAuxObjects(); } catch (IOException ioex) { Logger.getLogger(DeleteDataFileCommand.class.getName()).log(Level.SEVERE, "Error deleting Auxiliary file(s) while deleting DataFile {0}", doomed.getStorageIdentifier()); } // We only want to attempt to delete the main physical file // if it actually exists, on the filesystem or wherever it - // is actually stored by its DataFileIO: + // is actually stored by its StorageIO: boolean physicalFileExists = false; try { - physicalFileExists = dataFileIO.exists(); + physicalFileExists = storageIO.exists(); } catch (IOException ioex) { // We'll assume that an exception here means that the file does not // exist; so we can skip trying to delete it.
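Both the isDatasetLogoPresent() rewrite and the deletion sequence above go through the same aux-object API; a minimal sketch of that storage-neutral probe, assuming only the StorageIO calls shown in this patch (the helper class and method names are illustrative):

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.dataaccess.DataAccess;
import edu.harvard.iq.dataverse.dataaccess.StorageIO;
import java.io.IOException;

public class AuxObjectProbeExample {
    // Storage-neutral presence check: because it goes through the StorageIO
    // aux-object API, it behaves the same for file, S3, and Swift drivers,
    // unlike the Files.exists() probe it replaces.
    static boolean hasAuxObject(Dataset dataset, String auxTag) {
        try {
            StorageIO dataAccess = DataAccess.getStorageIO(dataset);
            return dataAccess.isAuxObjectCached(auxTag);
        } catch (IOException ioex) {
            return false; // inaccessible storage reads as "not present"
        }
    }
}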
@@ -112,7 +112,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { if (physicalFileExists) { try { - dataFileIO.delete(); + storageIO.delete(); } catch (IOException ex) { // This we will treat as a fatal condition: throw new CommandExecutionException("Error deleting physical file object while deleting DataFile " + doomed.getId() + " from the database.", ex, this); @@ -121,10 +121,10 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { logger.log(Level.FINE, "Successfully deleted physical storage object (file) for the DataFile {0}", doomed.getId()); - // Destroy the dataFileIO object - we will need to purge the + // Destroy the storageIO object - we will need to purge the // DataFile from the database (below), so we don't want to have any // objects in this transaction that reference it: - dataFileIO = null; + storageIO = null; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java index fdf49929ac0..903393744d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import static edu.harvard.iq.dataverse.dataset.DatasetUtil.deleteDatasetLogo; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -76,6 +77,9 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { dfIt.remove(); } + //also, let's delete the uploaded thumbnails!
+ deleteDatasetLogo(doomed); + // ASSIGNMENTS for (RoleAssignment ra : ctxt.roles().directRoleAssignments(doomed)) { diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java index cb3b70a48ec..b1b4e7646a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java @@ -1,13 +1,12 @@ - package edu.harvard.iq.dataverse.export; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getDataFileIO; +import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.thumb48addedByImageThumbConverter; import edu.harvard.iq.dataverse.export.spi.Exporter; @@ -44,18 +43,17 @@ * @author skraffmi */ public class ExportService { - + private static ExportService service; private ServiceLoader loader; static SettingsServiceBean settingsService; private ExportService() { - loader = ServiceLoader.load(Exporter.class); + loader = ServiceLoader.load(Exporter.class); } /** - * TODO: Audit all calls to this getInstance method that doesn't take a - * SettingsServiceBean as an argument to make sure nothing broke. + * TODO: Audit all calls to this getInstance method that don't take a SettingsServiceBean as an argument to make sure nothing broke. */ public static synchronized ExportService getInstance() { return getInstance(null); @@ -65,12 +63,12 @@ public static synchronized ExportService getInstance(SettingsServiceBean setting ExportService.settingsService = settingsService; if (service == null) { service = new ExportService(); - } else{ + } else { service.loader.reload(); } return service; } - + public List< String[]> getExportersLabels() { List retList = new ArrayList(); Iterator exporters = ExportService.getInstance().loader.iterator(); @@ -88,30 +86,27 @@ public InputStream getExport(Dataset dataset, String formatName) throws ExportEx // first we will try to locate an already existing, cached export // for this format: InputStream exportInputStream = getCachedExportFormat(dataset, formatName); - + if (exportInputStream != null) { return exportInputStream; } - + // if it doesn't exist, we'll try to run the export: - exportFormat(dataset, formatName); - + // and then try again: - exportInputStream = getCachedExportFormat(dataset, formatName); - + if (exportInputStream != null) { return exportInputStream; } - + // if there is still no cached export - we have to give up and throw // an exception!
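The ServiceLoader call in the constructor above is the entire plugin mechanism behind ExportService; a short sketch of how exporters are discovered (the example class and the printing loop are illustrative, not part of this patch):

import edu.harvard.iq.dataverse.export.spi.Exporter;
import java.util.ServiceLoader;

public class ExporterDiscoveryExample {
    // Exporters are plugins: adding a new metadata format means shipping an
    // Exporter implementation plus a META-INF/services entry; ExportService
    // itself needs no changes.
    public static void main(String[] args) {
        ServiceLoader<Exporter> loader = ServiceLoader.load(Exporter.class);
        for (Exporter e : loader) {
            System.out.println(e.getProviderName()); // provider name, as used in the hunks above
        }
    }
}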
- - throw new ExportException("Failed to export the dataset as "+formatName); - + throw new ExportException("Failed to export the dataset as " + formatName); + } - + public String getExportAsString(Dataset dataset, String formatName) { try { InputStream inputStream = getExport(dataset, formatName); @@ -133,67 +128,64 @@ public String getExportAsString(Dataset dataset, String formatName) { return null; } - - + // This method goes through all the Exporters and calls // the "cacheExport()" method that will save the produced output // in a file in the dataset directory, on each Exporter available. - - public void exportAllFormats (Dataset dataset) throws ExportException { + public void exportAllFormats(Dataset dataset) throws ExportException { try { clearAllCachedFormats(dataset); } catch (IOException ex) { Logger.getLogger(ExportService.class.getName()).log(Level.SEVERE, null, ex); } - + try { DatasetVersion releasedVersion = dataset.getReleasedVersion(); if (releasedVersion == null) { - throw new ExportException("No released version for dataset "+dataset.getGlobalId()); + throw new ExportException("No released version for dataset " + dataset.getGlobalId()); } JsonPrinter jsonPrinter = new JsonPrinter(settingsService); final JsonObjectBuilder datasetAsJsonBuilder = jsonPrinter.jsonAsDatasetDto(releasedVersion); JsonObject datasetAsJson = datasetAsJsonBuilder.build(); - + Iterator exporters = loader.iterator(); - while ( exporters.hasNext()) { + while (exporters.hasNext()) { Exporter e = exporters.next(); - String formatName = e.getProviderName(); - + String formatName = e.getProviderName(); + cacheExport(releasedVersion, formatName, datasetAsJson, e); - + } } catch (ServiceConfigurationError serviceError) { - throw new ExportException("Service configuration error during export. "+serviceError.getMessage()); + throw new ExportException("Service configuration error during export. " + serviceError.getMessage()); } // Finally, if we have been able to successfully export in all available // formats, we'll increment the "last exported" time stamp: - + dataset.setLastExportTime(new Timestamp(new Date().getTime())); - + } - + public void clearAllCachedFormats(Dataset dataset) throws IOException { try { - Iterator exporters = loader.iterator(); - while (exporters.hasNext()) { - Exporter e = exporters.next(); - String formatName = e.getProviderName(); - - clearCachedExport(dataset, formatName); - } - - dataset.setLastExportTime(null); + Iterator exporters = loader.iterator(); + while (exporters.hasNext()) { + Exporter e = exporters.next(); + String formatName = e.getProviderName(); + + clearCachedExport(dataset, formatName); + } + + dataset.setLastExportTime(null); } catch (IOException ex) { //not fatal } } - + // This method finds the exporter for the format requested, // then produces the dataset metadata as a JsonObject, then calls - // the "chacheExport()" method that will save the produced output + // the "cacheExport()" method that will save the produced output // in a file in the dataset directory. - public void exportFormat(Dataset dataset, String formatName) throws ExportException { try { Iterator exporters = loader.iterator(); @@ -215,7 +207,7 @@ throw new ExportException("No published version found during export.
" + dataset.getGlobalId()); } } - + public Exporter getExporter(String formatName) throws ExportException { try { Iterator exporters = loader.iterator(); @@ -228,13 +220,13 @@ public Exporter getExporter(String formatName) throws ExportException { } catch (ServiceConfigurationError serviceError) { throw new ExportException("Service configuration error during export. " + serviceError.getMessage()); } catch (Exception ex) { - throw new ExportException("Could not find Exporter \""+formatName+"\", unknown exception"); + throw new ExportException("Could not find Exporter \"" + formatName + "\", unknown exception"); } - throw new ExportException("No such Exporter: "+formatName); + throw new ExportException("No such Exporter: " + formatName); } - + // This method runs the selected metadata exporter, caching the output - // in a file in the dataset dirctory: + // in a file in the dataset directory / container based on its DOI: private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException { try { if (version.getDataset().getFileSystemDirectory() != null && !Files.exists(version.getDataset().getFileSystemDirectory())) { @@ -251,30 +243,20 @@ private void cacheExport(DatasetVersion version, String format, JsonObject datas // Some drivers (like Swift) do not support that, and will give us an // "operation not supported" exception. If that's the case, we'll have // to save the output into a temp file, and then copy it over to the - // permanent storage using the DataAccess IO "save" command: - + // permanent storage using the IO "save" command: boolean tempFileRequired = false; File tempFile = null; OutputStream outputStream = null; Dataset dataset = version.getDataset(); - DataFileIO dataFileIO = null; - + StorageIO storageIO = null; try { - dataFileIO = DataAccess.createNewDataFileIO(dataset, "file"); - Channel outputChannel = dataFileIO.openAuxChannel(format, DataAccessOption.WRITE_ACCESS); + storageIO = DataAccess.createNewStorageIO(dataset, "file"); + Channel outputChannel = storageIO.openAuxChannel(format, DataAccessOption.WRITE_ACCESS); outputStream = Channels.newOutputStream((WritableByteChannel) outputChannel); } catch (IOException ioex) { tempFileRequired = true; - } - - //todo: move if statement etc into catch above - if (tempFileRequired) { - try { - tempFile = File.createTempFile("tempFileToExport", ".tmp"); - outputStream = new FileOutputStream(tempFile); - } catch (IOException ioex) { - throw new ExportException("IO Exception thrown exporting as " + format); - } + tempFile = File.createTempFile("tempFileToExport", ".tmp"); + outputStream = new FileOutputStream(tempFile); } try { @@ -292,11 +274,11 @@ private void cacheExport(DatasetVersion version, String format, JsonObject datas System.out.println("Saving path as aux for temp file in: " + Paths.get(tempFile.getAbsolutePath())); System.out.println("Temp file to path:" + tempFile.toPath()); - dataFileIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached"); + storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached"); } - + } catch (IOException ioex) { - throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached"); + throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached"); } } catch (IOException ioex) { @@ -304,39 +286,27 @@ private void cacheExport(DatasetVersion version, String format, JsonObject datas } } - + private void 
clearCachedExport(Dataset dataset, String format) throws IOException { try { - DataFileIO dataFileIO = getDataFileIO(dataset); - - dataFileIO.deleteAuxObject("export_" + format + ".cached"); - + StorageIO storageIO = getStorageIO(dataset); + storageIO.deleteAuxObject("export_" + format + ".cached"); + } catch (IOException ex) { throw new IOException("IO Exception thrown exporting as " + "export_" + format + ".cached"); } - - -// [original Leonid written code below:] -// if (dataset != null && dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { -// -// Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + format + ".cached"); -// try { -// Files.delete(cachedMetadataFilePath); -// } catch (IOException ioex) { -// } -// } + } - + // This method checks if the metadata has already been exported in this // format and cached on disk. If it has, it'll open the file and return // the file input stream. If not, it'll return null. - private InputStream getCachedExportFormat(Dataset dataset, String formatName) throws ExportException, IOException { - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try { - dataAccess = DataAccess.getDataFileIO(dataset); + dataAccess = DataAccess.getStorageIO(dataset); } catch (IOException ioex) { throw new IOException("IO Exception thrown exporting as " + "export_" + formatName + ".cached"); } @@ -351,27 +321,15 @@ private InputStream getCachedExportFormat(Dataset dataset, String formatName) th } catch (IOException ioex) { throw new IOException("IO Exception thrown exporting as " + "export_" + formatName + ".cached"); } - -// try { -// if (dataset.getFileSystemDirectory() != null) { -// Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + formatName + ".cached"); -// if (Files.exists(cachedMetadataFilePath)) { -// FileInputStream cachedExportInputStream = new FileInputStream(cachedMetadataFilePath.toFile()); -// return cachedExportInputStream; -// } -// } -// } catch (IOException ioex) { -// // don't do anything - we'll just return null -// } return null; } - + /*The below method, getCachedExportSize(), is not currently used. *An exercise for the reader could be to refactor it if it's needed *to be compatible with storage drivers other than local filesystem. + *Files.exists() would need to be discarded. * -- L.A.
4.8 */ - // public Long getCachedExportSize(Dataset dataset, String formatName) { // try { // if (dataset.getFileSystemDirectory() != null) { @@ -386,9 +344,7 @@ private InputStream getCachedExportFormat(Dataset dataset, String formatName) th // // return null; // } - - - public Boolean isXMLFormat(String provider){ + public Boolean isXMLFormat(String provider) { try { Iterator exporters = loader.iterator(); while (exporters.hasNext()) { @@ -400,7 +356,7 @@ public Boolean isXMLFormat(String provider){ } catch (ServiceConfigurationError serviceError) { serviceError.printStackTrace(); } - return null; + return null; } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index ce171b1e3ff..803154a4c2f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -38,7 +38,7 @@ import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.FileAccessIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator; @@ -269,27 +269,27 @@ public void addFiles (DatasetVersion version, List newFiles) { boolean localFile = false; boolean savedSuccess = false; - DataFileIO dataAccess = null; + StorageIO dataAccess = null; try { - logger.fine("Attempting to create a new DataFileIO object for " + storageId); - dataAccess = DataAccess.createNewDataFileIO(dataFile, storageId); + logger.fine("Attempting to create a new storageIO object for " + storageId); + dataAccess = DataAccess.createNewStorageIO(dataFile, storageId); if (dataAccess.isLocalFile()) { localFile = true; } - logger.fine("Successfully created a new DataFileIO object."); + logger.fine("Successfully created a new storageIO object."); /* This commented-out code demonstrates how to copy bytes from a local InputStream (or a readChannel) into the writable byte channel of a Dataverse DataAccessIO object: */ /* - dataFileIO.open(DataAccessOption.WRITE_ACCESS); + storageIO.open(DataAccessOption.WRITE_ACCESS); - writeChannel = dataFileIO.getWriteChannel(); + writeChannel = storageIO.getWriteChannel(); readChannel = new FileInputStream(tempLocationPath.toFile()).getChannel(); long bytesPerIteration = 16 * 1024; // 16K bytes @@ -308,7 +308,7 @@ from a local InputStream (or a readChannel) into the local filesystem, the DataAccessIO will simply copy the file using Files.copy, like this: - Files.copy(tempLocationPath, dataFileIO.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); + Files.copy(tempLocationPath, storageIO.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); */ dataAccess.savePath(tempLocationPath); @@ -544,8 +544,8 @@ public void produceContinuousSummaryStatistics(DataFile dataFile, File generated for (int i = 0; i < dataFile.getDataTable().getVarQuantity(); i++) { if (dataFile.getDataTable().getDataVariables().get(i).isIntervalContinuous()) { logger.fine("subsetting continuous vector"); - DataFileIO dataFileIO = dataFile.getDataFileIO(); - dataFileIO.open(); + StorageIO storageIO = dataFile.getStorageIO(); + storageIO.open(); if 
("float".equals(dataFile.getDataTable().getDataVariables().get(i).getFormat())) { Float[] variableVector = TabularSubsetGenerator.subsetFloatVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); logger.fine("Calculating summary statistics on a Float vector;"); @@ -576,8 +576,8 @@ public void produceDiscreteNumericSummaryStatistics(DataFile dataFile, File gene if (dataFile.getDataTable().getDataVariables().get(i).isIntervalDiscrete() && dataFile.getDataTable().getDataVariables().get(i).isTypeNumeric()) { logger.fine("subsetting discrete-numeric vector"); - DataFileIO dataFileIO = dataFile.getDataFileIO(); - dataFileIO.open(); + StorageIO storageIO = dataFile.getStorageIO(); + storageIO.open(); Long[] variableVector = TabularSubsetGenerator.subsetLongVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); // We are discussing calculating the same summary stats for // all numerics (the same kind of sumstats that we've been calculating @@ -610,8 +610,8 @@ public void produceCharacterSummaryStatistics(DataFile dataFile, File generatedT for (int i = 0; i < dataFile.getDataTable().getVarQuantity(); i++) { if (dataFile.getDataTable().getDataVariables().get(i).isTypeCharacter()) { - DataFileIO dataFileIO = dataFile.getDataFileIO(); - dataFileIO.open(); + StorageIO storageIO = dataFile.getStorageIO(); + storageIO.open(); logger.fine("subsetting character vector"); String[] variableVector = TabularSubsetGenerator.subsetStringVector(new FileInputStream(generatedTabularFile), i, dataFile.getDataTable().getCaseQuantity().intValue()); //calculateCharacterSummaryStatistics(dataFile, i, variableVector); @@ -680,24 +680,24 @@ public boolean ingestAsTabular(Long datafile_id) { //DataFile dataFile) throws I BufferedInputStream inputStream = null; File additionalData = null; - DataFileIO dataFileIO = null; + StorageIO storageIO = null; try { - dataFileIO = dataFile.getDataFileIO(); - dataFileIO.open(); + storageIO = dataFile.getStorageIO(); + storageIO.open(); - if (dataFileIO.isLocalFile()) { - inputStream = new BufferedInputStream(dataFileIO.getInputStream()); + if (storageIO.isLocalFile()) { + inputStream = new BufferedInputStream(storageIO.getInputStream()); } else { - ReadableByteChannel dataFileChannel = dataFileIO.getReadChannel(); + ReadableByteChannel dataFileChannel = storageIO.getReadChannel(); File tempFile = File.createTempFile("tempIngestSourceFile", ".tmp"); FileChannel tempIngestSourceChannel = new FileOutputStream(tempFile).getChannel(); - tempIngestSourceChannel.transferFrom(dataFileChannel, 0, dataFileIO.getSize()); + tempIngestSourceChannel.transferFrom(dataFileChannel, 0, storageIO.getSize()); inputStream = new BufferedInputStream(new FileInputStream(tempFile)); - logger.fine("Saved "+dataFileIO.getSize()+" bytes in a local temp file."); + logger.fine("Saved "+storageIO.getSize()+" bytes in a local temp file."); } } catch (IOException ioEx) { dataFile.SetIngestProblem(); @@ -849,7 +849,7 @@ public boolean ingestAsTabular(Long datafile_id) { //DataFile dataFile) throws I try { /* Start of save as backup */ - DataFileIO dataAccess = dataFile.getDataFileIO(); + StorageIO dataAccess = dataFile.getStorageIO(); dataAccess.open(); // and we want to save the original of the ingested file: @@ -1335,8 +1335,8 @@ public void performPostProcessingTasks(DataFile dataFile) { */ if (dataFile != null && dataFile.isImage()) { try { - DataFileIO dataAccess = dataFile.getDataFileIO(); - if (dataAccess != 
null) { // && dataFileIO.isLocalFile()) { + StorageIO dataAccess = dataFile.getStorageIO(); + if (dataAccess != null) { // && storageIO.isLocalFile()) { if (ImageThumbConverter.isThumbnailAvailable(dataFile, ImageThumbConverter.DEFAULT_PREVIEW_SIZE)) { dataFile.setPreviewImageAvailable(true); diff --git a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java index 99b0332a0e3..76736123e5f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java +++ b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java @@ -21,7 +21,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.dataaccess.DataFileIO; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest; import java.io.*; import java.util.*; @@ -564,7 +564,7 @@ public File runDataPreprocessing(DataFile dataFile) { // send the tabular data file to the Rserve side: DataAccessRequest daReq = new DataAccessRequest(); - DataFileIO accessObject = DataAccess.getDataFileIO(dataFile, daReq); + StorageIO accessObject = DataAccess.getStorageIO(dataFile, daReq); if (accessObject == null) { return null; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index e2225498cec..c23f70158f2 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1359,6 +1359,10 @@ diff --git a/src/main/webapp/dataverse_template.xhtml b/src/main/webapp/dataverse_template.xhtml index 24443fecd27..40b31d5e3e7 100644 --- a/src/main/webapp/dataverse_template.xhtml +++ b/src/main/webapp/dataverse_template.xhtml @@ -109,32 +109,7 @@ bind_bsui_components(); }); - /* - * Custom Popover with HTML code snippet - */ - function popoverHTML() { - var popoverTemplateHTML = ['<div class="popover">', - '<div class="arrow"></div>', - '<h3 class="popover-title"></h3>', - '<div class="popover-content">', - '</div>', - '</div>'].join(''); - - var popoverContentHTML = ['<span>', - '&lt;a&gt;, &lt;b&gt;, &lt;blockquote&gt;, &lt;br&gt;, &lt;code&gt;, &lt;del&gt;, &lt;dd&gt;, &lt;dl&gt;, &lt;dt&gt;, &lt;em&gt;, &lt;hr&gt;, &lt;h1&gt;-&lt;h3&gt;, &lt;i&gt;, &lt;img&gt;, &lt;kbd&gt;, &lt;li&gt;, &lt;ol&gt;, &lt;p&gt;, &lt;pre&gt;, &lt;s&gt;, &lt;sup&gt;, &lt;sub&gt;, &lt;strong&gt;, &lt;strike&gt;, &lt;ul&gt;', - '</span>'].join(''); - - $('body').popover({ - selector: 'span.popoverHTML', - title: '', - trigger: 'hover', - content: popoverContentHTML, - template: popoverTemplateHTML, - placement: "bottom", - container: "#content", - html: true - }); - } + //]]> diff --git a/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js b/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js index 0d01d792982..b5d2f3d1e6e 100644 --- a/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js +++ b/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js @@ -174,6 +174,34 @@ function post_cancel_edit_files_or_metadata(){ bind_bsui_components(); } +/* +* Custom Popover with HTML code snippet +*/ +function popoverHTML(popoverTitleHTML) { + + var popoverTemplateHTML = ['<div class="popover">', + '<div class="arrow"></div>', + '<h3 class="popover-title"></h3>', + '<div class="popover-content">', + '</div>', + '</div>'].join(''); + + var popoverContentHTML = ['<span>', + '&lt;a&gt;, &lt;b&gt;, &lt;blockquote&gt;, &lt;br&gt;, &lt;code&gt;, &lt;del&gt;, &lt;dd&gt;, &lt;dl&gt;, &lt;dt&gt;, &lt;em&gt;, &lt;hr&gt;, &lt;h1&gt;-&lt;h3&gt;, &lt;i&gt;, &lt;img&gt;, &lt;kbd&gt;, &lt;li&gt;, &lt;ol&gt;, &lt;p&gt;, &lt;pre&gt;, &lt;s&gt;, &lt;sup&gt;, &lt;sub&gt;, &lt;strong&gt;, &lt;strike&gt;, &lt;ul&gt;', + '</span>'].join(''); + + $('body').popover({ + selector: 'span.popoverHTML', + title: popoverTitleHTML, + trigger: 'hover', + content: popoverContentHTML, + template: popoverTemplateHTML, + placement: "bottom", + container: "#content", + html: true + }); +} + /* * Equal Div Height */ diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataFileIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java similarity index 98% rename from src/test/java/edu/harvard/iq/dataverse/dataaccess/DataFileIOTest.java rename to src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java index 70dc2ae8ea4..be62dea92ed 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataFileIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java @@ -28,8 +28,8 @@ * * @author oscardssmith */ -public class DataFileIOTest { - DataFileIO instance = new FileAccessIO<>(); +public class StorageIOTest { + StorageIO instance = new FileAccessIO<>(); @Test public void testGetChannel() throws FileNotFoundException { diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index a513e990886..5bbe1756bfb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -3,67 +3,49 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.mocks.MocksFactory; import java.io.InputStream; import java.util.ArrayList; import java.util.List; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; public class DatasetUtilTest { - public DatasetUtilTest() { - } - - @BeforeClass - public static void setUpClass() { - } - - @AfterClass - public static void tearDownClass() { - } - - @Before - public void setUp() { - } - - @After - public void tearDown() { - } - /** * Test of getThumbnailCandidates method, of class DatasetUtil.
     /**
      * Test of getThumbnailCandidates method, of class DatasetUtil.
      */
     @Test
     public void testGetThumbnailCandidates() {
-        System.out.println("getThumbnailCandidates");
-        Dataset dataset = null;
-        boolean considerDatasetLogoAsCandidate = false;
-        List<DataFile> expResult = new ArrayList<>();
-        List<DataFile> result = DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate);
-        assertEquals(expResult, result);
+        assertEquals(new ArrayList<>(), DatasetUtil.getThumbnailCandidates(null, false));
+
+        Dataset dataset = MocksFactory.makeDataset();
+        DataFile dataFile = MocksFactory.makeDataFile();
+        dataFile.setContentType("image/");
+        dataFile.setOwner(dataset);
+        dataFile.setStorageIdentifier("file://src/test/resources/images/coffeeshop.png");
+
+        System.out.println(ImageThumbConverter.isThumbnailAvailable(dataFile));
+        DatasetVersion version = dataset.getCreateVersion();
+        List<FileMetadata> fmds = new ArrayList<>();
+        fmds.add(MocksFactory.addFileMetadata(dataFile));
+        version.setFileMetadatas(fmds);
+        assertEquals(new ArrayList<>(), DatasetUtil.getThumbnailCandidates(dataset, false));
     }
 
     @Test
     public void testGetThumbnailNullDataset() {
-        System.out.println("testGetThumbnailNullDataset");
-        Dataset dataset = null;
-        DatasetThumbnail expResult = null;
-        DatasetThumbnail result = DatasetUtil.getThumbnail(dataset);
-        assertEquals(expResult, result);
-    }
+        assertNull(DatasetUtil.getThumbnail(null));
+        assertNull(DatasetUtil.getThumbnail(null, null));
 
-    @Test
-    public void testGetThumbnailUseGeneric() {
-        System.out.println("testGetThumbnailUseGeneric");
-        Dataset dataset = new Dataset();
+        Dataset dataset = MocksFactory.makeDataset();
+        dataset.setStorageIdentifier("file://");
         dataset.setUseGenericThumbnail(true);
-        DatasetThumbnail result = DatasetUtil.getThumbnail(dataset);
-        assertNull(result);
+
+        assertNull(DatasetUtil.getThumbnail(dataset));
+        assertNull(DatasetUtil.getThumbnail(dataset, new DatasetVersion()));
     }
 
     @Test
@@ -77,26 +59,13 @@ public void testGetThumbnailRestricted() {
         DatasetThumbnail result = DatasetUtil.getThumbnail(dataset);
         assertNull(result);
     }
-
-    @Test
-    public void testGetThumbnailNullDatasetNullDatasetVersion() {
-        System.out.println("testGetThumbnailNullDatasetNullDatasetVersion");
-        Dataset dataset = null;
-        DatasetVersion datasetVersion = null;
-        DatasetThumbnail result = DatasetUtil.getThumbnail(dataset, datasetVersion);
-        assertEquals(null, result);
-    }
-
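+    // Editor's note: deleteDatasetLogo() reports success as a boolean rather than
+    // throwing, so both the null dataset and a freshly constructed, logo-less
+    // dataset are expected to return false below.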
     /**
      * Test of deleteDatasetLogo method, of class DatasetUtil.
      */
     @Test
     public void testDeleteDatasetLogo() {
-        System.out.println("deleteDatasetLogo");
-        Dataset dataset = null;
-        boolean expResult = false;
-        boolean result = DatasetUtil.deleteDatasetLogo(dataset);
-        assertEquals(expResult, result);
+        assertEquals(false, DatasetUtil.deleteDatasetLogo(null));
+        assertEquals(false, DatasetUtil.deleteDatasetLogo(new Dataset()));
     }
 
     /**
@@ -104,11 +73,7 @@ public void testDeleteDatasetLogo() {
      */
     @Test
     public void testGetDefaultThumbnailFile() {
-        System.out.println("getDefaultThumbnailFile");
-        Dataset dataset = null;
-        DataFile expResult = null;
-        DataFile result = DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null);
-        assertEquals(expResult, result);
+        assertNull(DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(null, null));
     }
 
     /**
@@ -117,12 +82,8 @@ public void testGetDefaultThumbnailFile() {
      */
     @Test
     public void testPersistDatasetLogoToStorageAndCreateThumbnail() {
-        System.out.println("persistDatasetLogoToStorageAndCreateThumbnail");
-        Dataset dataset = null;
-        InputStream inputStream = null;
-        Dataset expResult = null;
-        Dataset result = DatasetUtil.persistDatasetLogoToStorageAndCreateThumbnail(dataset, inputStream);
-        assertEquals(expResult, result);
+        assertNull(DatasetUtil.persistDatasetLogoToStorageAndCreateThumbnail(null, null));
+        // TODO: add a test that exercises the main logic
     }
 
     /**
@@ -130,11 +91,7 @@ public void testPersistDatasetLogoToStorageAndCreateThumbnail() {
      */
     @Test
     public void testGetThumbnailAsInputStream() {
-        System.out.println("getThumbnailAsInputStream");
-        Dataset dataset = null;
-        InputStream expResult = null;
-        InputStream result = DatasetUtil.getThumbnailAsInputStream(dataset);
-        assertEquals(expResult, result);
+        assertNull(DatasetUtil.getThumbnailAsInputStream(null));
     }
 
     /**
@@ -142,7 +99,6 @@ public void testGetThumbnailAsInputStream() {
      */
     @Test
     public void testIsDatasetLogoPresent() {
-        System.out.println("isDatasetLogoPresent");
         Dataset dataset = MocksFactory.makeDataset();
         assertEquals(false, DatasetUtil.isDatasetLogoPresent(dataset));
     }