From dc7cdd0443ba18b13775fa84cd3391caa4d7af71 Mon Sep 17 00:00:00 2001
From: yakutovicha
Date: Fri, 13 Jul 2018 15:29:10 +0200
Subject: [PATCH] Move verdi data to click (#1738)

* Data subcommand move to click:
  1) create a folder for data click commands
  2) place subcommands of verdi data in different files
* data upf is moved to click
* Put list-related functions to a separate file
* data bands (not fully) transferred to click
* Grouping the list option arguments
* Removing unneeded load_dbenv
* data bands are fully transferred to click
* More progress on cif
* Remove label and description from data level
* More progress on cif
* data parameter is transferred to click
* More progress on cif
* Put export in a separate file
* More progress on cif
* More progress on cif
* data remote is fully transferred to click
* More progress on cif deposit
* More progress on cif deposit
* Ported verdi data structure to click
* Transfer verdi data trajectory to click
* Transfer verdi data array to click
* Deposit function in a separate file
* More progress on data cif
* Small changes in data cif
* Fixing various errors
* More progress on tests
* More progress on verdi data cif tests
* Polish the verdi data code:
  - remove unnecessary imports
  - put show functionality in a separate file
  - put commonly used click options in isolated containers
* More progress in tests
* Transfer verdi data cif to click
* Cleaning & correcting the code.
* Cleaning tests, cif & general code, fixing problems in argument grouping
* Adding tests for cif import
* More progress on cif tests
* Correctly loading db_env so as not to affect the tests
* Cif tests should be OK (need some cleaning but are complete). We should see why and if we need to have so many flags that are not used.
* Improving structure data listing.
Starting structure data tests * More progress on verdi data tests * Structure listing test added * More progress on TrajectoryData list testing * Finished with array and bands tests * Add tests for verdi data parameter * some progress on verdi data remote tests * finish tests for verdi data remote * Trajectory data listing test OK * Verdi band listing tested more extensively, uses common listing arguments and has proper group support * verdi data cif list test uses also the common method for testing * Minor beautification of the code * finish with tests for verdi data upf * Remove --elements options where not needed * verdi data export tests for structure and trajectory too * More on verdi data trajectory export tests * verdi data cif export uses common testing mechanism with structure and trajectory * Add checks to verdi data cif,structure,trajectory * Remove redundant tests * Remove redundant test from the list of backend tests * Solve formatting problems * Adapt array.py and bands.py to python convention * Adapt cif.py to python format convention * Adapt deposit.py to python format convention * Adapt export.py to python format convention * Adapt list.py to python format convention * Adapt parameter.py to python format convention * Adapt remote.py to python format convention * Adapt show.py to python format convention * Adapt structure.py to python format convention * Adapt trajectory.py to python format convention * Adapt upf.py to python format convention * format changes at some random places * Fix remaining format changes --- .pre-commit-config.yaml | 12 + aiida/backends/tests/__init__.py | 3 +- .../tests/cmdline/commands/test_data.py | 1073 +++++++++ aiida/backends/tests/verdi_commands.py | 518 ---- aiida/cmdline/commands/data.py | 2142 ----------------- aiida/cmdline/commands/data/__init__.py | 48 + aiida/cmdline/commands/data/array.py | 44 + aiida/cmdline/commands/data/bands.py | 220 ++ aiida/cmdline/commands/data/cif.py | 174 ++ aiida/cmdline/commands/data/deposit.py | 112 + aiida/cmdline/commands/data/export.py | 112 + aiida/cmdline/commands/data/list.py | 114 + aiida/cmdline/commands/data/parameter.py | 42 + aiida/cmdline/commands/data/remote.py | 94 + aiida/cmdline/commands/data/show.py | 210 ++ aiida/cmdline/commands/data/structure.py | 374 +++ aiida/cmdline/commands/data/trajectory.py | 164 ++ aiida/cmdline/commands/data/upf.py | 157 ++ aiida/cmdline/commands/devel.py | 3 +- aiida/cmdline/params/options/overridable.py | 2 +- docs/source/developer_guide/data_cmdline.rst | 22 +- 21 files changed, 2962 insertions(+), 2678 deletions(-) create mode 100644 aiida/backends/tests/cmdline/commands/test_data.py delete mode 100644 aiida/backends/tests/verdi_commands.py delete mode 100644 aiida/cmdline/commands/data.py create mode 100644 aiida/cmdline/commands/data/__init__.py create mode 100644 aiida/cmdline/commands/data/array.py create mode 100644 aiida/cmdline/commands/data/bands.py create mode 100644 aiida/cmdline/commands/data/cif.py create mode 100644 aiida/cmdline/commands/data/deposit.py create mode 100644 aiida/cmdline/commands/data/export.py create mode 100644 aiida/cmdline/commands/data/list.py create mode 100644 aiida/cmdline/commands/data/parameter.py create mode 100644 aiida/cmdline/commands/data/remote.py create mode 100644 aiida/cmdline/commands/data/show.py create mode 100644 aiida/cmdline/commands/data/structure.py create mode 100644 aiida/cmdline/commands/data/trajectory.py create mode 100644 aiida/cmdline/commands/data/upf.py diff --git a/.pre-commit-config.yaml 
b/.pre-commit-config.yaml index b54660e0f5..eed496e681 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,6 +16,18 @@ aiida/cmdline/commands/calculation.py| aiida/cmdline/commands/comment.py| aiida/cmdline/commands/daemon.py| + aiida/cmdline/commands/data/array.py| + aiida/cmdline/commands/data/bands.py| + aiida/cmdline/commands/data/cif.py| + aiida/cmdline/commands/data/deposit.py| + aiida/cmdline/commands/data/export.py| + aiida/cmdline/commands/data/list.py| + aiida/cmdline/commands/data/parameter.py| + aiida/cmdline/commands/data/remote.py| + aiida/cmdline/commands/data/show.py| + aiida/cmdline/commands/data/structure.py| + aiida/cmdline/commands/data/trajectory.py| + aiida/cmdline/commands/data/upf.py| aiida/cmdline/commands/code.py| aiida/cmdline/commands/graph.py| aiida/cmdline/commands/group.py| diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py index 1f51e2e840..47def53a23 100644 --- a/aiida/backends/tests/__init__.py +++ b/aiida/backends/tests/__init__.py @@ -71,6 +71,8 @@ 'cmdline.params.types.identifier': ['aiida.backends.tests.cmdline.params.types.test_identifier'], 'cmdline.params.types.node': ['aiida.backends.tests.cmdline.params.types.test_node'], 'cmdline.params.types.plugin': ['aiida.backends.tests.cmdline.params.types.test_plugin'], + 'cmdline.commands.code': ['aiida.backends.tests.cmdline.commands.test_code'], + 'cmdline.commands.data': ['aiida.backends.tests.cmdline.commands.test_data'], 'cmdline.params.types.workflow': ['aiida.backends.tests.cmdline.params.types.test_workflow'], 'daemon.client': ['aiida.backends.tests.daemon.test_client'], 'orm.data.frozendict': ['aiida.backends.tests.orm.data.frozendict'], @@ -94,7 +96,6 @@ 'work.job_processes': ['aiida.backends.tests.work.job_processes'], 'plugin_loader': ['aiida.backends.tests.test_plugin_loader'], 'daemon': ['aiida.backends.tests.daemon'], - 'verdi_commands': ['aiida.backends.tests.verdi_commands'], 'caching_config': ['aiida.backends.tests.test_caching_config'], 'inline_calculation': ['aiida.backends.tests.inline_calculation'], } diff --git a/aiida/backends/tests/cmdline/commands/test_data.py b/aiida/backends/tests/cmdline/commands/test_data.py new file mode 100644 index 0000000000..e610804cf4 --- /dev/null +++ b/aiida/backends/tests/cmdline/commands/test_data.py @@ -0,0 +1,1073 @@ +import sys +import os +import shutil +import unittest +import tempfile +import numpy as np +import subprocess as sp + +from click.testing import CliRunner + + +from aiida.orm import Computer +from aiida.cmdline.utils import echo +from aiida.orm.group import Group +from aiida.orm.data.array import ArrayData +from aiida.orm.data.array.bands import BandsData +from aiida.orm.data.array.kpoints import KpointsData +from aiida.orm.data.cif import CifData +from aiida.orm.data.parameter import ParameterData +from aiida.orm.data.remote import RemoteData +from aiida.orm.data.structure import StructureData +from aiida.orm.data.array.trajectory import TrajectoryData + +from aiida.orm.backend import construct_backend +from aiida.backends.testbase import AiidaTestCase +from aiida.cmdline.commands.data import array +from aiida.cmdline.commands.data import bands +from aiida.cmdline.commands.data import cif +from aiida.cmdline.commands.data import parameter +from aiida.cmdline.commands.data import remote +from aiida.cmdline.commands.data import structure +from aiida.cmdline.commands.data import trajectory +from aiida.cmdline.commands.data import upf + +from aiida.backends.utils import get_backend_type 
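+
+# DbAuthInfo is a backend-specific model, so the import below has to be picked
+# at run time according to the backend (SQLAlchemy or Django) of the profile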
+if get_backend_type() == 'sqlalchemy':
+    from aiida.backends.sqlalchemy.models.authinfo import DbAuthInfo
+else:
+    from aiida.backends.djsite.db.models import DbAuthInfo
+
+
+from unittest import skip
+
+from aiida.work.workfunctions import workfunction as wf
+
+
+from contextlib import contextmanager
+from StringIO import StringIO
+
+@contextmanager
+def captured_output():
+    new_out, new_err = StringIO(), StringIO()
+    old_out, old_err = sys.stdout, sys.stderr
+    try:
+        sys.stdout, sys.stderr = new_out, new_err
+        yield sys.stdout, sys.stderr
+    finally:
+        sys.stdout, sys.stderr = old_out, old_err
+
+
+class TestVerdiDataExportable:
+
+    def __init__(self):
+        pass
+
+    NODE_ID_STR = "node_id"
+    EMPTY_GROUP_ID_STR = 'empty_group_id'
+    EMPTY_GROUP_NAME_STR = 'empty_group'
+    NON_EMPTY_GROUP_ID_STR = 'non_empty_group_id'
+    NON_EMPTY_GROUP_NAME_STR = 'non_empty_group'
+
+    def data_export_test(self, datatype, ids, supported_formats):
+        """
+        This method tests that the data export works as expected with all
+        possible flags and arguments for different datatypes.
+        """
+
+        from aiida.cmdline.commands.data.cif import export as export_cif
+        from aiida.cmdline.commands.data.structure import export as export_str
+        from aiida.cmdline.commands.data.trajectory import export as export_tr
+
+        datatype_mapping = {
+            CifData: export_cif,
+            StructureData: export_str,
+            TrajectoryData: export_tr,
+        }
+
+        if datatype is None or datatype not in datatype_mapping.keys():
+            raise Exception("The export of the objects {} is not supported"
+                            .format(datatype))
+
+        export_cmd = datatype_mapping[datatype]
+
+        # Check that the simple command works as expected
+        options = [str(ids[self.NODE_ID_STR])]
+        res = self.cli_runner.invoke(export_cmd, options,
+                                     catch_exceptions=False)
+        self.assertEquals(res.exit_code, 0, "The command did not finish "
+                                            "correctly")
+
+        dump_flags = ['-y', '--format']
+        for flag in dump_flags:
+            for format in supported_formats:
+                # with captured_output() as (out, err):
+                options = [flag, format, str(ids[self.NODE_ID_STR])]
+                res = self.cli_runner.invoke(export_cmd, options,
+                                             catch_exceptions=False)
+                self.assertEquals(res.exit_code, 0,
+                                  "The command did not finish "
+                                  "correctly")
+
+
+#        # The --parameter-data flag is not implemented and it should fail
+#        options = ['--parameter-data', '0', str(ids[self.NODE_ID_STR])]
+#        res = self.cli_runner.invoke(export_cmd, options,
+#                                     catch_exceptions=False)
+#        self.assertNotEquals(res.exit_code, 0,
+#                             "The command should not finish correctly and"
+#                             "return normal termination exit status.")
+#
+#        # The following flags fail.
+#        # We have to see why. The --reduce-symmetry seems to work in
+#        # the original code. The other one not.
+# symmetry_flags = ['--reduce-symmetry', '--no-reduce-symmetry'] +# for flag in symmetry_flags: +# options = [flag, str(ids[self.NODE_ID_STR])] +# res = self.cli_runner.invoke(export_cmd, options, +# catch_exceptions=False) +# self.assertNotEquals(res.exit_code, 0, +# "The command should not finish correctly and" +# "return normal termination exit status.") +# +# +# # The following two flags are not implemented and should return +# # an error: +# # --dump-aiida-database / --no-dump-aiida-database +# dump_flags = ['--dump-aiida-database' , '--no-dump-aiida-database'] +# for flag in dump_flags: +# options = [flag, str(ids[self.NODE_ID_STR])] +# res = self.cli_runner.invoke(export_cmd, options, +# catch_exceptions=False) +# self.assertNotEquals(res.exit_code, 0, +# "The command should not finish correctly and" +# "return normal termination exit status.") +# +# +# # The following two flags are not implemented and should return +# # an error: +# # --exclude-external-contents / --no-exclude-external-contents +# external_cont_flags = ['--exclude-external-contents' , +# '--no-exclude-external-contents'] +# for flag in external_cont_flags: +# options = [flag, str(ids[self.NODE_ID_STR])] +# res = self.cli_runner.invoke(export_cmd, options, +# catch_exceptions=False) +# self.assertNotEquals(res.exit_code, 0, +# "The command should not finish correctly and" +# "return normal termination exit status.") +# +# +# # The following two flags are not implemented and should return +# # an error: +# # --gzip / --no-gzip +# gzip_flags = ['--gzip' , '--no-gzip'] +# for flag in gzip_flags: +# options = [flag, str(ids[self.NODE_ID_STR])] +# res = self.cli_runner.invoke(export_cmd, options, +# catch_exceptions=False) +# +# self.assertNotEquals(res.exit_code, 0, +# "The command should not finish correctly and" +# "return normal termination exit status.") +# +# # The --gzip-threshold flag is not implemented and it should fail +# options = ['--gzip-threshold', '1', str(ids[self.NODE_ID_STR])] +# res = self.cli_runner.invoke(export_cmd, options, +# catch_exceptions=False) +# self.assertNotEquals(res.exit_code, 0, +# "The command should not finish correctly and" +# "return normal termination exit status.") + + # Check that the output to file flags work correctly: + # -o, --output + output_flags = ['-o', '--output'] + for flag in output_flags: + try: + tmpd = tempfile.mkdtemp() + filepath = os.path.join(tmpd, 'output_file.txt') + options = [flag, filepath, str(ids[self.NODE_ID_STR])] + res = self.cli_runner.invoke(export_cmd, options, + catch_exceptions=False) + self.assertEquals(res.exit_code, 0, + "The command should finish correctly." + "Output: {}".format(res.output_bytes)) + + # Try to export it again. It should fail because the + # file exists + res = self.cli_runner.invoke(export_cmd, options, + catch_exceptions=False) + self.assertNotEquals(res.exit_code, 0, + "The command should fail because the " + "file already exists") + + # Now we force the export of the file and it should overwrite + # existing files + options = [flag, filepath, '-f', str(ids[self.NODE_ID_STR])] + res = self.cli_runner.invoke(export_cmd, options, + catch_exceptions=False) + self.assertEquals(res.exit_code, 0, + "The command should finish correctly." 
+ "Output: {}".format(res.output_bytes)) + finally: + shutil.rmtree(tmpd) + + +class TestVerdiDataListable: + + def __init__(self): + pass + + NODE_ID_STR = "node_id" + EMPTY_GROUP_ID_STR = 'empty_group_id' + EMPTY_GROUP_NAME_STR = 'empty_group' + NON_EMPTY_GROUP_ID_STR = 'non_empty_group_id' + NON_EMPTY_GROUP_NAME_STR = 'non_empty_group' + + def data_listing_test(self, datatype, search_string, ids): + """ + This method tests that the data listing works as expected with all + possible flags and arguments for different datatypes. + """ + + from aiida.cmdline.commands.data.cif import cif_list + from aiida.cmdline.commands.data.structure import list_structures + from aiida.cmdline.commands.data.trajectory import list_trajections + from aiida.cmdline.commands.data.bands import bands_list + + from aiida.cmdline.commands.data.structure import PROJECT_HEADERS as p_str + from aiida.cmdline.commands.data.cif import PROJECT_HEADERS as p_cif + from aiida.cmdline.commands.data.trajectory import PROJECT_HEADERS as p_tr + from aiida.cmdline.commands.data.bands import PROJECT_HEADERS as p_bands + + headers_mapping = { + CifData: p_cif, + StructureData: p_str, + TrajectoryData: p_tr, + BandsData: p_bands + } + + datatype_mapping = { + CifData: cif_list, + StructureData: list_structures, + TrajectoryData: list_trajections, + BandsData: bands_list + } + + if datatype is None or datatype not in datatype_mapping.keys(): + raise Exception("The listing of the objects {} is not supported" + .format(datatype)) + + listing_cmd = datatype_mapping[datatype] + project_headers = headers_mapping[datatype] + + # Check that the normal listing works as expected + res = self.cli_runner.invoke(listing_cmd, [], + catch_exceptions=False) + self.assertIn(search_string, res.output_bytes, + 'The string {} was not found in the listing' + .format(search_string)) + + # Check that the past days filter works as expected + past_days_flags = ['-p', '--past-days'] + # past_days_flags = ['-p'] + for flag in past_days_flags: + options = [flag, '1'] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + self.assertIn(search_string, res.output_bytes, + 'The string {} was not found in the listing' + .format(search_string)) + + options = [flag, '0'] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + self.assertNotIn(search_string, res.output_bytes, + 'A not expected string {} was found in the listing' + .format(search_string)) + + # Check that the group filter works as expected + group_flags = ['-G', '--groups'] + for flag in group_flags: + # Non empty group + for non_empty in [self.NON_EMPTY_GROUP_NAME_STR, + str(ids[self.NON_EMPTY_GROUP_ID_STR])]: + options = [flag, non_empty] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + self.assertIn(search_string, res.output_bytes, + 'The string {} was not found in the listing') + + # Empty group + for empty in [self.EMPTY_GROUP_NAME_STR, + str(ids[self.EMPTY_GROUP_ID_STR])]: + options = [flag, empty] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + self.assertNotIn( + search_string, res.output_bytes, + 'A not expected string {} was found in the listing') + + # Group combination + for non_empty in [self.NON_EMPTY_GROUP_NAME_STR, + str(ids[self.NON_EMPTY_GROUP_ID_STR])]: + for empty in [self.EMPTY_GROUP_NAME_STR, + str(ids[self.EMPTY_GROUP_ID_STR])]: + options = [flag, non_empty, empty] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + 
self.assertIn(search_string, res.output_bytes, + 'The string {} was not found in the listing') + + # Check raw flag + raw_flags = ['-r', '--raw'] + for flag in raw_flags: + options = [flag] + res = self.cli_runner.invoke(listing_cmd, options, + catch_exceptions=False) + for header in project_headers: + self.assertNotIn(header, res.output_bytes) + +class TestVerdiData(AiidaTestCase): + """ + Testing reachability of the verdi data subcommands + """ + @classmethod + def setUpClass(cls): + super(TestVerdiData, cls).setUpClass() + + def setUp(self): + pass + + def test_reachable(self): + """ + Testing reachability of the following commands: + verdi data array + verdi data bands + verdi data cif + verdi data parameter + verdi data remote + verdi data structure + verdi data trajectory + verdi data upf + """ + subcommands = ['array', 'bands', 'cif', 'parameter', 'remote', + 'structure', 'trajectory', 'upf'] + for sub_cmd in subcommands: + output = sp.check_output(['verdi', 'data', sub_cmd, '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data {} --help failed.". format(sub_cmd)) + +class TestVerdiDataArray(AiidaTestCase): + """ + Testing verdi data array + """ + @classmethod + def setUpClass(cls): + super(TestVerdiDataArray, cls).setUpClass() + + + def setUp(self): + self.a = ArrayData() + self.a.set_array('test_array', np.array([0, 1, 3])) + self.a.store() + + self.cli_runner = CliRunner() + + def test_arrayshowhelp(self): + output = sp.check_output(['verdi', 'data', 'array', 'show', '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data array show --help failed.") + + def test_arrayshow(self): + # with captured_output() as (out, err): + options = [str(self.a.id)] + res = self.cli_runner.invoke(array.show, options, + catch_exceptions=False) + self.assertEquals(res.exit_code, 0, + "The command did not finish " + "correctly") + +class TestVerdiDataBands(AiidaTestCase, TestVerdiDataListable): + """ + Testing verdi data bands + """ + + @staticmethod + def create_structure_bands(): + alat = 4. # angstrom + cell = [[alat, 0., 0., ], + [0., alat, 0., ], + [0., 0., alat, ], + ] + s = StructureData(cell=cell) + s.append_atom(position=(0., 0., 0.), symbols='Fe') + s.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols='O') + s.store() + + @wf + def connect_structure_bands(structure): + alat = 4. 
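+            # the cell is rebuilt inside the workfunction: taking `structure`
+            # as an input is what records the provenance link to the bands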
+            cell = np.array([[alat, 0., 0.],
+                             [0., alat, 0.],
+                             [0., 0., alat],
+                             ])
+
+            k = KpointsData()
+            k.set_cell(cell)
+            k.set_kpoints_path([('G', 'M', 2)])
+
+            b = BandsData()
+            b.set_kpointsdata(k)
+            b.set_bands([[1.0, 2.0], [3.0, 4.0]])
+
+            k.store()
+            b.store()
+
+            return b
+
+        b = connect_structure_bands(s)
+
+        # Create 2 groups and add the data to one of them
+        g_ne = Group(name='non_empty_group')
+        g_ne.store()
+        g_ne.add_nodes(b)
+
+        g_e = Group(name='empty_group')
+        g_e.store()
+
+        return {
+            TestVerdiDataListable.NODE_ID_STR: b.id,
+            TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
+            TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
+        }
+
+    @classmethod
+    def setUpClass(cls):
+        super(TestVerdiDataBands, cls).setUpClass()
+        cls.ids = cls.create_structure_bands()
+
+    def setUp(self):
+        self.cli_runner = CliRunner()
+
+    def test_bandsshowhelp(self):
+        output = sp.check_output(['verdi', 'data', 'bands', 'show', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data bands show --help failed.")
+
+    def test_bandlistshelp(self):
+        output = sp.check_output(['verdi', 'data', 'bands', 'list', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data bands list --help failed.")
+
+    def test_bandslist(self):
+        from aiida.orm.data.array.bands import BandsData
+
+        self.data_listing_test(BandsData, 'FeO', self.ids)
+
+    def test_bandexporthelp(self):
+        output = sp.check_output(['verdi', 'data', 'bands', 'export', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data bands export --help failed.")
+
+    def test_bandsexport(self):
+        options = [str(self.ids[TestVerdiDataListable.NODE_ID_STR])]
+        res = self.cli_runner.invoke(bands.export, options,
+                                     catch_exceptions=False)
+        self.assertEquals(res.exit_code, 0,
+                          "The command did not finish "
+                          "correctly")
+        self.assertIn("[1.0, 3.0]", res.output_bytes,
+                      'The string [1.0, 3.0] was not found in the bands'
+                      ' export')
+
+class TestVerdiDataParameter(AiidaTestCase):
+    """
+    Testing verdi data parameter
+    """
+    @classmethod
+    def setUpClass(cls):
+        super(TestVerdiDataParameter, cls).setUpClass()
+
+    def setUp(self):
+        self.p = ParameterData()
+        self.p.set_dict({'a':1, 'b':2})
+        self.p.store()
+
+        self.cli_runner = CliRunner()
+
+    def test_parametershowhelp(self):
+        output = sp.check_output(['verdi', 'data', 'parameter', 'show', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data parameter show --help failed.")
+
+    def test_parametershow(self):
+        supported_formats = ['json_date']
+        for format in supported_formats:
+            options = [str(self.p.id)]
+            res = self.cli_runner.invoke(parameter.show, options,
+                                         catch_exceptions=False)
+            self.assertEquals(res.exit_code, 0,
+                              "The command verdi data parameter show did not"
+                              " finish correctly")
+            self.assertIn('"a": 1', res.output_bytes,
+                          'The string "a": 1 was not found in the output'
+                          ' of verdi data parameter show')
+
+class TestVerdiDataRemote(AiidaTestCase):
+    """
+    Testing verdi data remote
+    """
+    @classmethod
+    def setUpClass(cls):
+        super(TestVerdiDataRemote, cls).setUpClass()
+        new_comp = Computer(name='comp',
+                            hostname='localhost',
+                            transport_type='local',
+                            scheduler_type='direct',
+                            workdir=tempfile.mkdtemp())
+        new_comp.store()
+        b = construct_backend()
+        aiidauser = b.users.get_automatic_user()
+        authinfo = DbAuthInfo(dbcomputer=new_comp.dbcomputer,
+                              aiidauser=aiidauser.dbuser)
+        authinfo.save()
+
+    def setUp(self):
+        comp = Computer.get('comp')
+        self.r = RemoteData()
+        p = tempfile.mkdtemp()
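+        # stage a small file in the remote folder so the ls/cat tests below
+        # have something to list and print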
+        self.r.set_remote_path(p)
+        with open(p+'/file.txt', 'w') as f:
+            f.write("test string")
+        self.r.set_computer(comp)
+        self.r.store()
+
+        self.cli_runner = CliRunner()
+
+    def test_remoteshowhelp(self):
+        output = sp.check_output(['verdi', 'data', 'remote', 'show', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data remote show --help failed.")
+
+    def test_remoteshow(self):
+        options = [str(self.r.id)]
+        res = self.cli_runner.invoke(remote.show, options,
+                                     catch_exceptions=False)
+        self.assertEquals(res.exit_code, 0,
+                          "The command verdi data remote show did not"
+                          " finish correctly")
+        self.assertIn('Remote computer name:', res.output_bytes,
+                      'The string "Remote computer name:" was not found in the'
+                      ' output of verdi data remote show')
+        self.assertIn('Remote folder full path:', res.output_bytes,
+                      'The string "Remote folder full path:" was not found in the'
+                      ' output of verdi data remote show')
+
+    def test_remotelshelp(self):
+        output = sp.check_output(['verdi', 'data', 'remote', 'ls', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data remote ls --help failed.")
+
+    def test_remotels(self):
+        options = ['--long', str(self.r.id)]
+        res = self.cli_runner.invoke(remote.lsfunction, options,
+                                     catch_exceptions=False)
+        self.assertEquals(res.exit_code, 0,
+                          "The command verdi data remote ls did not"
+                          " finish correctly")
+        self.assertIn('file.txt', res.output_bytes,
+                      'The file "file.txt" was not found in the output'
+                      ' of verdi data remote ls')
+
+    def test_remotecathelp(self):
+        output = sp.check_output(['verdi', 'data', 'remote', 'cat', '--help'])
+        self.assertIn(
+            'Usage:', output,
+            "Sub-command verdi data remote cat --help failed.")
+
+    def test_remotecat(self):
+        options = [str(self.r.id), 'file.txt']
+        res = self.cli_runner.invoke(remote.cat, options,
+                                     catch_exceptions=False)
+        self.assertEquals(res.exit_code, 0,
+                          "The command verdi data remote cat did not"
+                          " finish correctly")
+        self.assertIn('test string', res.output_bytes,
+                      'The string "test string" was not found in the output'
+                      ' of verdi data remote cat file.txt')
+
+
+class TestVerdiDataTrajectory(AiidaTestCase, TestVerdiDataListable,
+                              TestVerdiDataExportable):
+
+    @staticmethod
+    def create_trajectory_data():
+        from aiida.orm.data.array.trajectory import TrajectoryData
+        from aiida.orm.group import Group
+        import numpy
+
+        # Create a node with two arrays
+        n = TrajectoryData()
+
+        # I create sample data
+        stepids = numpy.array([60, 70])
+        times = stepids * 0.01
+        cells = numpy.array([
+            [[2., 0., 0., ],
+             [0., 2., 0., ],
+             [0., 0., 2., ]],
+            [[3., 0., 0., ],
+             [0., 3., 0., ],
+             [0., 0., 3., ]]])
+        symbols = numpy.array(['H', 'O', 'C'])
+        positions = numpy.array([
+            [[0., 0., 0.],
+             [0.5, 0.5, 0.5],
+             [1.5, 1.5, 1.5]],
+            [[0., 0., 0.],
+             [0.5, 0.5, 0.5],
+             [1.5, 1.5, 1.5]]])
+        velocities = numpy.array([
+            [[0., 0., 0.],
+             [0., 0., 0.],
+             [0., 0., 0.]],
+            [[0.5, 0.5, 0.5],
+             [0.5, 0.5, 0.5],
+             [-0.5, -0.5, -0.5]]])
+
+        # I set the node
+        n.set_trajectory(stepids=stepids, cells=cells, symbols=symbols,
+                         positions=positions, times=times,
+                         velocities=velocities)
+
+        n.store()
+
+        # Create 2 groups and add the data to one of them
+        g_ne = Group(name='non_empty_group')
+        g_ne.store()
+        g_ne.add_nodes(n)
+
+        g_e = Group(name='empty_group')
+        g_e.store()
+
+        return {
+            TestVerdiDataListable.NODE_ID_STR: n.id,
+            TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
+            TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
+        }
+
+    @classmethod
+    def setUpClass(cls):
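+        # the local computer and the trajectory data (grouped into an empty
+        # and a non-empty group) are created once and shared by all the tests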
+        super(TestVerdiDataTrajectory, cls).setUpClass()
+        new_comp = Computer(name='comp',
+                            hostname='localhost',
+                            transport_type='local',
+                            scheduler_type='direct',
+                            workdir='/tmp/aiida')
+        new_comp.store()
+        cls.ids = cls.create_trajectory_data()
+
+    def setUp(self):
+        self.comp = Computer.get('comp')
+        self.runner = CliRunner()
+        self.this_folder = os.path.dirname(__file__)
+        self.this_file = os.path.basename(__file__)
+
+        self.cli_runner = CliRunner()
+
+    def test_deposithelp(self):
+        res = self.runner.invoke(trajectory.deposit, ['--help'])
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data trajectory deposit --help')
+
+    def test_showhelp(self):
+        res = self.runner.invoke(trajectory.show, ['--help'])
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data trajectory show --help')
+
+    def test_list(self):
+        self.data_listing_test(
+            TrajectoryData, str(self.ids[TestVerdiDataListable.NODE_ID_STR]),
+            self.ids)
+
+    def test_export(self):
+        from aiida.cmdline.commands.data.trajectory import SUPPORTED_FORMATS
+        from aiida.cmdline.commands.data.trajectory import export
+
+        new_supported_formats = list(SUPPORTED_FORMATS)
+        # TCOD export needs special arguments
+        new_supported_formats.remove('tcod')
+        self.data_export_test(TrajectoryData, self.ids, new_supported_formats)
+        # Check independently the TCOD export that needs special arguments
+        dump_flags = ['-y', '--format']
+        for flag in dump_flags:
+            options = [flag, 'tcod', '--step', '0', str(self.ids[self.NODE_ID_STR])]
+            res = self.cli_runner.invoke(export, options,
+                                         catch_exceptions=False)
+            self.assertEquals(res.exit_code, 0,
+                              "The command did not finish "
+                              "correctly")
+
+
+class TestVerdiDataStructure(AiidaTestCase, TestVerdiDataListable,
+                             TestVerdiDataExportable):
+
+    @staticmethod
+    def create_structure_data():
+        from aiida.orm.data.structure import StructureData, Site, Kind
+        from aiida.orm.group import Group
+
+        alat = 4.  # angstrom
+        cell = [[alat, 0., 0., ],
+                [0., alat, 0., ],
+                [0., 0., alat, ],
+                ]
+
+        # BaTiO3 cubic structure
+        struc = StructureData(cell=cell)
+        struc.append_atom(position=(0., 0., 0.), symbols='Ba')
+        struc.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols='Ti')
+        struc.append_atom(position=(alat / 2., alat / 2., 0.), symbols='O')
+        struc.append_atom(position=(alat / 2., 0., alat / 2.), symbols='O')
+        struc.append_atom(position=(0., alat / 2., alat / 2.), symbols='O')
+        struc.store()
+
+        # Create 2 groups and add the data to one of them
+        g_ne = Group(name='non_empty_group')
+        g_ne.store()
+        g_ne.add_nodes(struc)
+
+        g_e = Group(name='empty_group')
+        g_e.store()
+
+        return {
+            TestVerdiDataListable.NODE_ID_STR: struc.id,
+            TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
+            TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
+        }
+
+    @classmethod
+    def setUpClass(cls):
+        super(TestVerdiDataStructure, cls).setUpClass()
+        from aiida.orm import Computer
+        new_comp = Computer(name='comp',
+                            hostname='localhost',
+                            transport_type='local',
+                            scheduler_type='direct',
+                            workdir='/tmp/aiida')
+        new_comp.store()
+        cls.ids = cls.create_structure_data()
+
+    def setUp(self):
+        from aiida.orm import Computer
+        self.comp = Computer.get('comp')
+        self.runner = CliRunner()
+        self.this_folder = os.path.dirname(__file__)
+        self.this_file = os.path.basename(__file__)
+
+        self.cli_runner = CliRunner()
+
+    def test_importhelp(self):
+        res = self.runner.invoke(structure.structure_import, ['--help'])
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data structure import --help')
+
+    def test_import(self):
+        xyzcontent = '''
+        2
+
+        Fe 0.0 0.0 0.0
+        O 2.0 2.0 2.0
+        '''
+        with tempfile.NamedTemporaryFile() as f:
+            f.write(xyzcontent)
+            f.flush()
+            options = [f.name,
+                       '--format', 'xyz',
+                       '--vacuum-factor', '1.0',
+                       '--vacuum-addition', '10.0',
+                       '--pbc', '1', '1', '1',
+                       ]
+            res = self.cli_runner.invoke(structure.structure_import,
+                                         options, catch_exceptions=False)
+            self.assertIn('Succesfully imported', res.output_bytes,
+                          'The string "Succesfully imported" was not found in the output'
+                          ' of verdi data structure import.')
+            options.append('--dont-store')
+            res = self.cli_runner.invoke(structure.structure_import,
+                                         options, catch_exceptions=False)
+            self.assertIn('PK = None', res.output_bytes,
+                          'The string "PK = None" was not found in the output'
+                          ' of verdi data structure import with --dont-store option.')
+
+    def test_showhelp(self):
+        res = self.runner.invoke(structure.show, ['--help'])
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data structure show --help')
+
+    def test_deposithelp(self):
+        res = self.runner.invoke(structure.deposit, ['--help'])
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data structure deposit --help')
+
+    def test_list(self):
+        self.data_listing_test(StructureData, 'BaO3Ti', self.ids)
+
+    def test_export(self):
+        from aiida.cmdline.commands.data.structure import SUPPORTED_FORMATS
+        self.data_export_test(StructureData, self.ids, SUPPORTED_FORMATS)
+
+
+class TestVerdiDataCif(AiidaTestCase, TestVerdiDataListable,
+                       TestVerdiDataExportable):
+
+    valid_sample_cif_str = '''
+        data_test
+        _cell_length_a    10
+        _cell_length_b    10
+        _cell_length_c    10
+        _cell_angle_alpha 90
+        _cell_angle_beta  90
+        _cell_angle_gamma 90
+        _chemical_formula_sum 'C O2'
+        loop_
+        _atom_site_label
+        _atom_site_fract_x
+        _atom_site_fract_y
+        _atom_site_fract_z
+        _atom_site_attached_hydrogens
+        C 0 0 0 0
+        O 0.5 0.5 0.5 .
+        H 0.75 0.75 0.75 0
+    '''
+
+    @classmethod
+    def create_cif_data(cls):
+        with tempfile.NamedTemporaryFile() as f:
+            filename = f.name
+            f.write(cls.valid_sample_cif_str)
+            f.flush()
+            a = CifData(file=filename,
+                        source={'version': '1234',
+                                'db_name': 'COD',
+                                'id': '0000001'})
+            a.store()
+
+            g_ne = Group(name='non_empty_group')
+            g_ne.store()
+            g_ne.add_nodes(a)
+
+            g_e = Group(name='empty_group')
+            g_e.store()
+
+        return {
+            TestVerdiDataListable.NODE_ID_STR: a.id,
+            TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
+            TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
+        }
+
+    @classmethod
+    def setUpClass(cls):
+        super(TestVerdiDataCif, cls).setUpClass()
+        new_comp = Computer(name='comp',
+                            hostname='localhost',
+                            transport_type='local',
+                            scheduler_type='direct',
+                            workdir='/tmp/aiida')
+        new_comp.store()
+
+        cls.ids = cls.create_cif_data()
+
+    def setUp(self):
+        self.comp = Computer.get('comp')
+        self.runner = CliRunner()
+        self.this_folder = os.path.dirname(__file__)
+        self.this_file = os.path.basename(__file__)
+
+        self.cli_runner = CliRunner()
+
+    def test_list(self):
+        """
+        This method tests that the Cif listing works as expected with all
+        possible flags and arguments.
+        """
+        from aiida.orm.data.cif import CifData
+
+        self.data_listing_test(CifData, 'C O2', self.ids)
+
+    def test_showhelp(self):
+        options = ['--help']
+        res = self.cli_runner.invoke(cif.show, options,
+                                     catch_exceptions=False)
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data cif show --help')
+
+    def test_deposithelp(self):
+        options = ['--help']
+        res = self.cli_runner.invoke(cif.deposit, options,
+                                     catch_exceptions=False)
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data cif deposit --help')
+
+    def test_importhelp(self):
+        options = ['--help']
+        res = self.cli_runner.invoke(cif.importfile, options,
+                                     catch_exceptions=False)
+        self.assertIn('Usage:', res.output_bytes,
+                      'The string "Usage: " was not found in the output'
+                      ' of verdi data cif import --help')
+
+    def test_import(self):
+        with tempfile.NamedTemporaryFile() as f:
+            f.write(self.valid_sample_cif_str)
+            f.flush()
+            options = [f.name]
+            res = self.cli_runner.invoke(cif.importfile, options,
+                                         catch_exceptions=False)
+            self.assertIn('imported uuid', res.output_bytes,
+                          'The string "imported uuid" was not found in the output'
+                          ' of verdi data cif import.')
+
+    def test_export(self):
+        """
+        This method checks if the Cif export works as expected with all
+        possible flags and arguments.
+        """
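+        # unlike the trajectory export above, CIF export seems to need no
+        # format-specific extra arguments, so all supported formats can go
+        # through the common helper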
+ """ + from aiida.cmdline.commands.data.cif import SUPPORTED_FORMATS + self.data_export_test(CifData, self.ids, SUPPORTED_FORMATS) + +class TestVerdiDataUpf(AiidaTestCase): + """ + Testing verdi data upf + """ + @classmethod + def setUpClass(cls): + super(TestVerdiDataUpf, cls).setUpClass() + + def setUp(self): + self.this_folder = os.path.dirname(__file__) + self.this_file = os.path.basename(__file__) + self.pseudos_dir = "../../../../../examples/testdata/qepseudos/" + + self.cli_runner = CliRunner() + + def upload_family(self): + options = [self.this_folder+'/'+self.pseudos_dir, + "test_group", + "test description"] + res = self.cli_runner.invoke(upf.uploadfamily, options, + catch_exceptions=False) + self.assertIn('UPF files found: 3', res.output_bytes, + 'The string "UPF files found: 3" was not found in the' + ' output of verdi data upf uploadfamily') + + + def test_uploadfamilyhelp(self): + output = sp.check_output(['verdi', 'data', 'upf', 'uploadfamily', '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data upf uploadfamily --help failed.") + + def test_uploadfamily(self): + self.upload_family() + options = [self.this_folder+'/'+self.pseudos_dir, + "test_group", + "test description", + "--stop-if-existing"] + with self.assertRaises(ValueError): + res = self.cli_runner.invoke(upf.uploadfamily, options, + catch_exceptions=False) + + + def test_exportfamilyhelp(self): + output = sp.check_output(['verdi', 'data', 'upf', 'exportfamily', '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data upf exportfamily --help failed.") + + + def test_exportfamily(self): + self.upload_family() + + p = tempfile.mkdtemp() + options = [p, 'test_group'] + res = self.cli_runner.invoke(upf.exportfamily, options, + catch_exceptions=False) + output = sp.check_output(['ls', p ]) + self.assertIn( + 'Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF', output, + "Sub-command verdi data upf exportfamily --help failed.") + self.assertIn( + 'O.pbesol-n-rrkjus_psl.0.1-tested-pslib030.UPF', output, + "Sub-command verdi data upf exportfamily --help failed.") + self.assertIn( + 'Ti.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF', output, + "Sub-command verdi data upf exportfamily --help failed.") + + + def test_listfamilieshelp(self): + output = sp.check_output(['verdi', 'data', 'upf', 'listfamilies', '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data upf listfamilies --help failed.") + + def test_listfamilies(self): + self.upload_family() + + options = ['-d', '-e', 'Ba'] + res = self.cli_runner.invoke(upf.listfamilies, options, + catch_exceptions=False) + + self.assertIn('test_group', res.output_bytes, + 'The string "test_group" was not found in the' + ' output of verdi data upf listfamilies') + + self.assertIn('test description', res.output_bytes, + 'The string "test_group" was not found in the' + ' output of verdi data upf listfamilies') + + + options = ['-d', '-e', 'Fe'] + res = self.cli_runner.invoke(upf.listfamilies, options, + catch_exceptions=False) + self.assertIn('No valid UPF pseudopotential', res.output_bytes, + 'The string "No valid UPF pseudopotential" was not' + ' found in the output of verdi data upf listfamilies') + + def test_importhelp(self): + output = sp.check_output(['verdi', 'data', 'upf', 'import', '--help']) + self.assertIn( + 'Usage:', output, + "Sub-command verdi data upf listfamilies --help failed.") + + def test_import(self): + options = [self.this_folder + '/'+self.pseudos_dir + '/' + + 
'Ti.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF', + '--format', + 'upf'] + res = self.cli_runner.invoke(upf.import_upf, options, + catch_exceptions=False) + + self.assertIn('Imported', res.output_bytes, + 'The string "Imported" was not' + ' found in the output of verdi data import' ) diff --git a/aiida/backends/tests/verdi_commands.py b/aiida/backends/tests/verdi_commands.py deleted file mode 100644 index ba3bde7e90..0000000000 --- a/aiida/backends/tests/verdi_commands.py +++ /dev/null @@ -1,518 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=missing-docstring,invalid-name,protected-access - -from aiida.backends.testbase import AiidaTestCase -from aiida.common.datastructures import calc_states -from aiida.utils.capturing import Capturing - -# Common computer information -computer_common_info = [ - "localhost", - "", - "True", - "ssh", - "torque", - "#!/bin/bash", - "/scratch/{username}/aiida_run", - "mpirun -np {tot_num_mpiprocs}", - "1", - EOFError, - EOFError, -] - -# Computer #1 -computer_name_1 = "torquessh1" -computer_setup_input_1 = [computer_name_1] + computer_common_info - -# Computer #2 -computer_name_2 = "torquessh2" -computer_setup_input_2 = [computer_name_2] + computer_common_info - -# Common code information -code_common_info_1 = [ - "simple script", - "False", - "simpleplugins.templatereplacer", -] -code_common_info_2 = [ - "/usr/local/bin/doubler.sh", - EOFError, - EOFError, -] - -# Code #1 -CODE_NAME_1 = "doubler_1" -# Code #2 -CODE_NAME_2 = "doubler_2" - -COMMON_CODE_SETUP_OPTS = [ - '--non-interactive', - '--description', 'simple script', - '--on-computer', - '--input-plugin', 'simpleplugins.templatereplacer', - '--remote-abs-path', '/usr/local/bin/doubler.sh', - '--prepend-text=', '--append-text=' -] # yapf: disable - -CODE_SETUP_OPTS_1 = COMMON_CODE_SETUP_OPTS + [ - '--label', CODE_NAME_1, - '--computer', computer_name_1, -] # yapf: disable - -CODE_SETUP_OPTS_2 = COMMON_CODE_SETUP_OPTS + [ - '--label', CODE_NAME_2, - '--computer', computer_name_2, -] # yapf: disable - - -# pylint: disable=no-self-use - - -class TestVerdiDataCommands(AiidaTestCase): - cmd_to_nodeid_map = dict() - cmd_to_nodeid_map_for_groups = dict() - cmd_to_nodeid_map_for_nuser = dict() - - group_name = "trj_group" - group_id = None - - @classmethod - def create_trajectory_data(cls, cmd_to_nodeid_map, cmd_to_nodeid_map_for_groups, cmd_to_nodeid_map_for_nuser, group, - new_user): - - from aiida.orm.data.array.trajectory import TrajectoryData - from aiida.cmdline.commands.data import _Trajectory - import numpy - - # Create the Trajectory data nodes - tjn1 = TrajectoryData() - - # I create sample data - stepids = numpy.array([60, 70]) - times = stepids * 0.01 - cells = numpy.array([[[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]], [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]]) - symbols = numpy.array(['H', 'O', 'C']) - positions = numpy.array([[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]], [[0., 0., 0.], [0.5, 0.5, 0.5], - [1.5, 1.5, 1.5]]]) - velocities = numpy.array([[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], [[0.5, 0.5, 0.5], [0.5, 
0.5, 0.5], - [-0.5, -0.5, -0.5]]]) - - # I set the node - tjn1.set_trajectory( - stepids=stepids, cells=cells, symbols=symbols, positions=positions, times=times, velocities=velocities) - tjn1.store() - - tjn2 = TrajectoryData() - tjn2.set_trajectory( - stepids=stepids, cells=cells, symbols=symbols, positions=positions, times=times, velocities=velocities) - tjn2.store() - - # Keep track of the created objects - cmd_to_nodeid_map[_Trajectory] = [tjn1.id, tjn2.id] - - # Add the second Trajectory data to the group - group.add_nodes([tjn2]) - # Keep track of the id of the node that you added to the group - cmd_to_nodeid_map_for_groups[_Trajectory] = tjn2.id - - # Create a trajectory data that belongs to another user - tjn3 = TrajectoryData() - tjn3.set_trajectory( - stepids=stepids, cells=cells, symbols=symbols, positions=positions, times=times, velocities=velocities) - tjn3.set_user(new_user) - tjn3.store() - - # Put it is to the right map - cmd_to_nodeid_map_for_nuser[_Trajectory] = [tjn3.id] - - @classmethod - def create_cif_data(cls, cmd_to_nodeid_map, cmd_to_nodeid_map_for_groups, cmd_to_nodeid_map_for_nuser, group, - new_user): - - from aiida.orm.data.cif import CifData - from aiida.cmdline.commands.data import _Cif - import tempfile - - # Create the CIF data nodes - with tempfile.NamedTemporaryFile() as f: - f.write(''' - data_9012064 - _space_group_IT_number 166 - _symmetry_space_group_name_H-M 'R -3 m :H' - _cell_angle_alpha 90 - _cell_angle_beta 90 - _cell_angle_gamma 120 - _cell_length_a 4.395 - _cell_length_b 4.395 - _cell_length_c 30.440 - _cod_database_code 9012064 - loop_ - _atom_site_label - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - _atom_site_U_iso_or_equiv - Bi 0.00000 0.00000 0.40046 0.02330 - Te1 0.00000 0.00000 0.00000 0.01748 - Te2 0.00000 0.00000 0.79030 0.01912 - ''') - f.flush() - c1 = CifData(file=f.name) - c1.store() - c2 = CifData(file=f.name) - c2.store() - - # Keep track of the created objects - cmd_to_nodeid_map[_Cif] = [c1.id, c2.id] - - # Add the second CIF data to the group - group.add_nodes([c2]) - # Keep track of the id of the node that you added to the group - cmd_to_nodeid_map_for_groups[_Cif] = c2.id - - # Create a Cif node belonging to another user - c3 = CifData(file=f.name) - c3.set_user(new_user) - c3.store() - - # Put it is to the right map - cmd_to_nodeid_map_for_nuser[_Cif] = [c3.id] - - @classmethod - def sub_create_bands_data(cls, user=None): - from aiida.orm.data.array.kpoints import KpointsData - from aiida.orm import JobCalculation - from aiida.orm.data.structure import StructureData - from aiida.common.links import LinkType - from aiida.orm.data.array.bands import BandsData - import numpy - - s = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))) - s.append_atom(position=(0., 0., 0.), symbols=['Ba', 'Ti'], weights=(1., 0.), name='mytype') - if user is not None: - s.set_user(user) - s.store() - - c = JobCalculation(computer=cls.computer, resources={'num_machines': 1, 'num_mpiprocs_per_machine': 1}) - if user is not None: - c.set_user(user) - c.store() - c.add_link_from(s, "S1", LinkType.INPUT) - c._set_state(calc_states.RETRIEVING) - - # define a cell - alat = 4. 
- cell = numpy.array([ - [alat, 0., 0.], - [0., alat, 0.], - [0., 0., alat], - ]) - - k = KpointsData() - k.set_cell(cell) - k.set_kpoints_path() - if user is not None: - k.set_user(user) - k.store() - - b = BandsData() - b.set_kpointsdata(k) - input_bands = numpy.array([numpy.ones(4) * i for i in range(k.get_kpoints().shape[0])]) - b.set_bands(input_bands, units='eV') - if user is not None: - b.set_user(user) - b.store() - - b.add_link_from(c, link_type=LinkType.CREATE) - - return b - - @classmethod - def create_bands_data(cls, cmd_to_nodeid_map, cmd_to_nodeid_map_for_groups, cmd_to_nodeid_map_for_nuser, group, - new_user): - from aiida.cmdline.commands.data import _Bands - - b1 = cls.sub_create_bands_data() - b2 = cls.sub_create_bands_data() - - # Keep track of the created objects - cmd_to_nodeid_map[_Bands] = [b1.id, b2.id] - - # Add the second Kpoint & Bands data to the group - group.add_nodes([b2]) - # Keep track of the id of the node that you added to the group - cmd_to_nodeid_map_for_groups[_Bands] = b2.id - - b3 = cls.sub_create_bands_data(user=new_user) - # Put it is to the right map (of the different user) - cmd_to_nodeid_map_for_nuser[_Bands] = [b3.id] - - @classmethod - def create_structure_data(cls, cmd_to_nodeid_map, cmd_to_nodeid_map_for_groups, cmd_to_nodeid_map_for_nuser, group, - new_user): - from aiida.orm.data.structure import StructureData - from aiida.cmdline.commands.data import _Structure - - s1 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))) - s1.append_atom(position=(0., 0., 0.), symbols=['Ba', 'Ti'], weights=(1., 0.), name='mytype') - s1.store() - - s2 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))) - s2.append_atom(position=(0., 0., 0.), symbols=['Ba', 'Ti'], weights=(1., 0.), name='mytype') - s2.store() - - # Keep track of the created objects - cmd_to_nodeid_map[_Structure] = [s1.id, s2.id] - - # Add the second Kpoint & Bands data to the group - group.add_nodes([s2]) - # Keep track of the id of the node that you added to the group - cmd_to_nodeid_map_for_groups[_Structure] = s2.id - - # Create a StructureData node belonging to another user - s3 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))) - s3.append_atom(position=(0., 0., 0.), symbols=['Ba', 'Ti'], weights=(1., 0.), name='mytype') - s3.set_user(new_user) - s3.store() - - # Put it is to the right map - cmd_to_nodeid_map_for_nuser[_Structure] = [s3.id] - - @classmethod - def setUpClass(cls, *args, **kwargs): - """ - Create some data needed for the tests - """ - super(TestVerdiDataCommands, cls).setUpClass() - - from aiida.orm.group import Group - - # Create a secondary user - new_email = "newuser@new.n" - new_user = cls.backend.users.create(email=new_email) - new_user.store() - - # Create a group to add specific data inside - g1 = Group(name=cls.group_name) - g1.store() - cls.group_id = g1.id - - cls.create_bands_data(cls.cmd_to_nodeid_map, cls.cmd_to_nodeid_map_for_groups, cls.cmd_to_nodeid_map_for_nuser, - g1, new_user) - - cls.create_structure_data(cls.cmd_to_nodeid_map, cls.cmd_to_nodeid_map_for_groups, - cls.cmd_to_nodeid_map_for_nuser, g1, new_user) - - cls.create_cif_data(cls.cmd_to_nodeid_map, cls.cmd_to_nodeid_map_for_groups, cls.cmd_to_nodeid_map_for_nuser, - g1, new_user) - - cls.create_trajectory_data(cls.cmd_to_nodeid_map, cls.cmd_to_nodeid_map_for_groups, - cls.cmd_to_nodeid_map_for_nuser, g1, new_user) - - def test_trajectory_simple_listing(self): - from aiida.cmdline.commands.data import _Bands - from aiida.cmdline.commands.data import 
_Structure - from aiida.cmdline.commands.data import _Cif - from aiida.cmdline.commands.data import _Trajectory - - sub_cmds = [_Bands, _Structure, _Cif, _Trajectory] - for sub_cmd in sub_cmds: - with Capturing() as output: - sub_cmd().list() - - out_str = ' '.join(output) - - for nid in self.cmd_to_nodeid_map[sub_cmd]: - if str(nid) not in out_str: - self.fail("The data objects ({}) with ids {} and {} " - "were not found. " - .format(sub_cmd, str(self.cmd_to_nodeid_map[sub_cmd][0]), - str(self.cmd_to_nodeid_map[sub_cmd][1])) + "The output was {}".format(out_str)) - - def test_trajectory_all_user_listing(self): - from aiida.cmdline.commands.data import _Bands - from aiida.cmdline.commands.data import _Structure - from aiida.cmdline.commands.data import _Cif - from aiida.cmdline.commands.data import _Trajectory - - sub_cmds = [_Bands, _Structure, _Cif, _Trajectory] - for sub_cmd in sub_cmds: - args_to_test = [['-A'], ['--all-users']] - for arg in args_to_test: - curr_scmd = sub_cmd() - with Capturing() as output: - curr_scmd.list(*arg) - - out_str = ' '.join(output) - - for nid in self.cmd_to_nodeid_map[sub_cmd] + self.cmd_to_nodeid_map_for_nuser[sub_cmd]: - if str(nid) not in out_str: - self.fail("The data objects ({}) with ids {} and {} " - "were not found. ".format(sub_cmd, str(self.cmd_to_nodeid_map[sub_cmd][0]), - str(self.cmd_to_nodeid_map[sub_cmd][1])) + - "The output was {}".format(out_str)) - - def test_trajectory_past_days_listing(self): - from aiida.cmdline.commands.data import _Bands - from aiida.cmdline.commands.data import _Structure - from aiida.cmdline.commands.data import _Cif - from aiida.cmdline.commands.data import _Trajectory - - sub_cmds = [_Bands, _Structure, _Cif, _Trajectory] - for sub_cmd in sub_cmds: - args_to_test = [['-p', '0'], ['--past-days', '0']] - for arg in args_to_test: - curr_scmd = sub_cmd() - with Capturing() as output: - curr_scmd.list(*arg) - out_str = ' '.join(output) - - # This should be an empty output - for nid in self.cmd_to_nodeid_map[sub_cmd]: - if str(nid) in out_str: - self.fail("No data objects should be retrieved and " - "some were retrieved. The (concatenation of " - "the) output was: {}".format(out_str)) - - args_to_test = [['-p', '1'], ['--past-days', '1']] - for arg in args_to_test: - curr_scmd = sub_cmd() - with Capturing() as output: - curr_scmd.list(*arg) - out_str = ' '.join(output) - - for nid in self.cmd_to_nodeid_map[sub_cmd]: - if str(nid) not in out_str: - self.fail("The data objects ({}) with ids {} and {} " - "were not found. ".format(sub_cmd, str(self.cmd_to_nodeid_map[sub_cmd][0]), - str(self.cmd_to_nodeid_map[sub_cmd][1])) + - "The output was {}".format(out_str)) - - def test_trajectory_group_listing(self): - from aiida.cmdline.commands.data import _Bands - from aiida.cmdline.commands.data import _Structure - from aiida.cmdline.commands.data import _Cif - from aiida.cmdline.commands.data import _Trajectory - - args_to_test = [['-g', self.group_name], ['--group-name', self.group_name], ['-G', str(self.group_id)], - ['--group-pk', str(self.group_id)]] - - sub_cmds = [_Bands, _Structure, _Cif, _Trajectory] - for sub_cmd in sub_cmds: - for arg in args_to_test: - curr_scmd = sub_cmd() - with Capturing() as output: - curr_scmd.list(*arg) - out_str = ' '.join(output) - - if str(self.cmd_to_nodeid_map_for_groups[sub_cmd]) not in out_str: - self.fail( - "The data object ({}) with id {} " - "was not found. 
".format( - sub_cmd, - str(self.cmd_to_nodeid_map_for_groups[sub_cmd]) + "The output was {}".format(out_str))) - - -class TestVerdiDataRemoteCommands(AiidaTestCase): - """ - Test the commands under 'verdi data remote' - - Implicitly also tests creating and configuring a computer with a local transport - """ - - @classmethod - def setUpClass(cls, *args, **kwargs): - """ - Create a configured computer to - """ - from aiida.orm import Computer - from aiida.cmdline.commands.computer import Computer as ComputerCmd - from aiida.orm.backend import construct_backend - - super(TestVerdiDataRemoteCommands, cls).setUpClass() - - backend = construct_backend() - - cls.computer_name = 'test_remote_ls' - cls.new_comp = Computer( - name=cls.computer_name, - hostname='localhost', - transport_type='local', - scheduler_type='direct', - workdir='/tmp/aiida') - cls.new_comp.store() - - # I need to configure the computer here; being 'local', - # there should not be any options asked here - with Capturing(): - ComputerCmd().run('configure', cls.computer_name) - - assert cls.new_comp.is_user_configured( - backend.users.get_automatic_user()), "There was a problem configuring the test computer" - - def test_remote_ls(self): - """ - Test if the 'verdi remote ls' command works - """ - from aiida.cmdline.commands.data import _Remote - import os - from aiida.orm.data.remote import RemoteData - from aiida.common.folders import SandboxFolder - - with SandboxFolder() as folder: - files = {'test1.txt': 'the_content_1', 'test2.txt': 'the_content_2'} - for fname, content in files.items(): - with open(os.path.join(folder.abspath, fname), 'w') as f: - f.write(content) - - r = RemoteData(computer=self.new_comp, remote_path=folder.abspath) - r.store() - - with Capturing() as output: - _Remote().run('ls', str(r.pk)) - - # output is a Capturing objects, looping on it returns the lines - found_files = set(output) - self.assertEquals(set(files.keys()), found_files) - - # Testing also ls -l, that calls a different implementation - with Capturing() as output: - _Remote().run('ls', '-l', str(r.pk)) - - # The filename is the last part of each line - found_files = set(_.split()[-1] for _ in output) - self.assertEquals(set(files.keys()), found_files) - - def test_remote_cat(self): - """ - Test if the 'verdi remote ls' command works - """ - from aiida.cmdline.commands.data import _Remote - import os - from aiida.orm.data.remote import RemoteData - from aiida.common.folders import SandboxFolder - - with SandboxFolder() as folder: - - files = {'test1.txt': 'the_content_1\nsecond line', 'test2.txt': 'the_content_2'} - for fname, content in files.items(): - with open(os.path.join(folder.abspath, fname), 'w') as f: - f.write(content) - - r = RemoteData(computer=self.new_comp, remote_path=folder.abspath) - r.store() - - for fname, content in files.items(): - with Capturing() as output: - _Remote().run('cat', str(r.pk), fname) - - self.assertEquals("\n".join(output), content, "The file content for file {} differs: {} vs. {}".format( - fname, "\n".join(output), content)) diff --git a/aiida/cmdline/commands/data.py b/aiida/cmdline/commands/data.py deleted file mode 100644 index 2dd460f2c4..0000000000 --- a/aiida/cmdline/commands/data.py +++ /dev/null @@ -1,2142 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -import sys - -import click - -from aiida.backends.utils import load_dbenv, is_dbenv_loaded -from aiida.cmdline import delayed_load_node as load_node -from aiida.cmdline.baseclass import ( - VerdiCommandRouter, VerdiCommandWithSubcommands) -from aiida.cmdline.commands.node import _Label, _Description -from aiida.common.exceptions import MultipleObjectsError -from aiida.cmdline.commands import verdi -from aiida.plugins.entry_point import get_entry_point_names - - -class Data(VerdiCommandRouter): - """ - Setup and manage data specific types - - There is a list of subcommands for managing specific types of data. - For instance, 'data upf' manages pseudopotentials in the UPF format. - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - ## Add here the classes to be supported. - self.routed_subcommands = { - 'upf': _Upf, - 'structure': _Structure, - 'bands': _Bands, - 'cif': _Cif, - 'trajectory': _Trajectory, - 'parameter': _Parameter, - 'array': _Array, - 'label': _Label, - 'remote': _Remote, - 'description': _Description, - } - entry_point_group = 'aiida.cmdline.data' - for entry_point_name in get_entry_point_names(entry_point_group): - self.routed_subcommands[entry_point_name] = verdi - -class Listable(object): - """ - Provides shell completion for listable data nodes. - - .. note:: classes, inheriting Listable, MUST define value for property - ``dataclass`` (preferably in ``__init__``), which - has to point to correct \*Data class. - """ - - def list(self, *args): - """ - List all instances of given data class. - - :param args: a list of command line arguments. - """ - import argparse - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='List {} objects.'.format(self.dataclass.__name__)) - - self.append_list_cmdline_arguments(parser) - - parser.add_argument('--vseparator', default="\t", - help="specify vertical separator for fields. " - "Default '\\t'.", - type=str, action='store') - parser.add_argument('--header', default=True, - help="print a header with column names. 
" - "Default option.", - dest="header", action='store_true') - parser.add_argument('--no-header', '-H', - help="do not print a header with column names.", - dest="header", action='store_false') - - args = list(args) - parsed_args = parser.parse_args(args) - - # print "parsed_args ===>", parsed_args - - entry_list = self.query(parsed_args) - - vsep = parsed_args.vseparator - if entry_list: - to_print = "" - if parsed_args.header: - to_print += vsep.join(self.get_column_names()) + "\n" - for entry in sorted(entry_list, key=lambda x: int(x[0])): - to_print += vsep.join(entry) + "\n" - sys.stdout.write(to_print) - - def query(self, args): - """ - Perform the query - """ - if not is_dbenv_loaded(): - load_dbenv() - - from aiida.orm.querybuilder import QueryBuilder - from aiida.orm.implementation import Group - from aiida.orm.user import User - from aiida.orm.backend import construct_backend - - backend = construct_backend() - - qb = QueryBuilder() - if args.all_users is False: - user = backend.users.get_automatic_user() - qb.append(User, tag="creator", filters={"email": user.email}) - else: - qb.append(User, tag="creator") - - data_filters = dict() - self.query_past_days(data_filters, args) - qb.append(self.dataclass, tag="data", created_by="creator", - filters=data_filters, project=["id"]) - - group_filters = {} - self.query_group(group_filters, args) - if group_filters: - qb.append(Group, tag="group", filters=group_filters, - group_of="data") - - qb.order_by({self.dataclass: {'ctime': 'asc'}}) - - object_list = qb.distinct() - - entry_list = [] - for [id] in object_list.all(): - entry_list.append([str(id)]) - return entry_list - - def query_past_days(self, filters, args): - """ - Subselect to filter data nodes by their age. - - :param filters: the filters to be enriched. - :param args: a namespace with parsed command line parameters. - """ - from aiida.utils import timezone - import datetime - if args.past_days is not None: - now = timezone.now() - n_days_ago = now - datetime.timedelta(days=args.past_days) - filters.update({"ctime": {'>=': n_days_ago}}) - return filters - - def query_group(self, filters, args): - """ - Subselect to filter data nodes by their group. - - :param q_object: a query object - :param args: a namespace with parsed command line parameters. - """ - if args.group_name is not None: - filters.update({"name": {"in": args.group_name}}) - if args.group_pk is not None: - filters.update({"id": {"in": args.group_pk}}) - - def append_list_cmdline_arguments(self, parser): - """ - Append additional command line parameters, that are later parsed and - used in the query construction. - - :param parser: instance of argparse.ArgumentParser - """ - parser.add_argument('-p', '--past-days', metavar='N', - help="add a filter to show only objects created in the past N days", - type=int, action='store') - parser.add_argument('-g', '--group-name', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=str, action='store') - parser.add_argument('-G', '--group-pk', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=int, action='store') - parser.add_argument('-A', '--all-users', action='store_true', default=False, - help="show objects for all users, rather than only for the" - "current user") - - def get_column_names(self): - """ - Return the list with column names. - - .. 
note:: neither the number nor correspondence of column names and - actual columns in the output from the :py:meth:`query` are checked. - """ - return ["ID"] - - -class Visualizable(object): - """ - Provides shell completion for visualizable data nodes. - - .. note:: classes, inheriting Visualizable, MUST NOT contain - attributes, starting with ``_show_``, which are not plugins for - visualization. - - In order to specify a default visualization format, one has to override - ``_default_show_format`` property (preferably in - ``__init__``), setting it to the name of default visualization tool. - """ - show_prefix = '_show_' - show_parameters_postfix = '_parameters' - - def get_show_plugins(self): - """ - Get the list of all implemented plugins for visualizing the structure. - """ - method_names = dir(self) # get list of class methods names - valid_formats = [i[len(self.show_prefix):] for i in method_names - if i.startswith(self.show_prefix) and \ - not i.endswith(self.show_parameters_postfix)] # filter - - return {k: getattr(self, self.show_prefix + k) for k in valid_formats} - - def show(self, *args): - """ - Show the data node with a visualization program. - """ - # DEVELOPER NOTE: to add a new plugin, just add a _show_xxx() method. - import argparse, os - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Visualize data object.') - parser.add_argument('data_id', type=int, default=None, nargs="+", - help="ID of the data object to be visualized.") - - default_format = None - try: - default_format = self._default_show_format - except AttributeError: - if len(self.get_show_plugins().keys()) == 1: - default_format = self.get_show_plugins().keys()[0] - else: - default_format = None - - parser.add_argument('--format', '-f', type=str, default=default_format, - help="Type of the visualization format/tool.", - choices=sorted(self.get_show_plugins().keys())) - - # Augmenting the command line parameters with ones, that are used by - # individual plugins - for cmd in dir(self): - if not cmd.startswith(self.show_prefix) or \ - not cmd.endswith(self.show_parameters_postfix): - continue - getattr(self, cmd)(parser) - - args = list(args) - parsed_args = vars(parser.parse_args(args)) - - data_id = parsed_args.pop('data_id') - format = parsed_args.pop('format') - - # Removing the keys, whose values are None - for key in parsed_args.keys(): - if parsed_args[key] is None: - parsed_args.pop(key) - - if format is None: - print >> sys.stderr, ( - "Default format is not defined, please specify.\n" - "Valid formats are:") - for i in self.get_show_plugins().keys(): - print >> sys.stderr, " {}".format(i) - sys.exit(1) - - # I can give in input the whole path to executable - code_name = os.path.split(format)[-1] - - try: - func = self.get_show_plugins()[code_name] - except KeyError: - print >> sys.stderr, "Not implemented; implemented plugins are:" - print >> sys.stderr, "{}.".format( - ",".join(self.get_show_plugins())) - sys.exit(1) - - if not is_dbenv_loaded(): - load_dbenv() - - n_list = [load_node(id) for id in data_id] - - for n in n_list: - try: - if not isinstance(n, self.dataclass): - print >> sys.stderr, ("Node {} is of class {} instead " - "of {}".format(n, type(n), self.dataclass)) - sys.exit(1) - except AttributeError: - pass - - try: - func(format, n_list, **parsed_args) - except MultipleObjectsError: - print >> sys.stderr, ( - "Visualization of multiple objects is not implemented " - "for '{}'".format(format)) - sys.exit(1) - - -class Exportable(object): - """ - 
Provides shell completion for exportable data nodes. - - .. note:: classes, inheriting Exportable, MUST NOT contain attributes, - starting with ``_export_``, which are not plugins for exporting. - """ - export_prefix = '_export_' - export_parameters_postfix = '_parameters' - - def append_export_cmdline_arguments(self, parser): - """ - Function (to be overloaded in a subclass) to add custom export command - line arguments. - - :param parser: a ArgParse parser object - :return: change the parser in place - """ - pass - - def get_export_plugins(self): - """ - Get the list of all implemented exporters for data class. - """ - method_names = dir(self) # get list of class methods names - valid_formats = [i[len(self.export_prefix):] for i in method_names - if i.startswith(self.export_prefix) and \ - not i.endswith(self.export_parameters_postfix)] # filter - - return {k: getattr(self, self.export_prefix + k) for k in valid_formats} - - def export(self, *args): - """ - Export the data node to a given format. - """ - # DEVELOPER NOTE: to add a new plugin, just add a _export_xxx() method. - import argparse - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Export data object.') - parser.add_argument('-o', '--output', type=str, default='', - help="If present, store the output directly on a file " - "with the given name. It is essential to use this option " - "if more than one file needs to be created.") - parser.add_argument('-y', '--overwrite', action='store_true', - help="If passed, overwrite files without checking.") - parser.add_argument('data_id', type=int, default=None, - help="ID of the data object to be visualized.") - - self.append_export_cmdline_arguments(parser) - - default_format = None - try: - default_format = self._default_export_format - except AttributeError: - if len(self.get_export_plugins().keys()) == 1: - default_format = self.get_export_plugins().keys()[0] - else: - default_format = None - - parser.add_argument('--format', '-f', type=str, default=default_format, - help="Type of the exported file.", - choices=sorted(self.get_export_plugins().keys())) - - # Augmenting the command line parameters with ones, that are used by - # individual plugins - for cmd in dir(self): - if not cmd.startswith(self.export_prefix) or \ - not cmd.endswith(self.export_parameters_postfix): - continue - getattr(self, cmd)(parser) - - args = list(args) - parsed_args = vars(parser.parse_args(args)) - - format = parsed_args.pop('format') - data_id = parsed_args.pop('data_id') - - # Removing the keys, whose values are None - for key in parsed_args.keys(): - if parsed_args[key] is None: - parsed_args.pop(key) - - if format is None: - print >> sys.stderr, ( - "Default format is not defined, please specify.\n" - "Valid formats are:") - for i in sorted(self.get_export_plugins().keys()): - print >> sys.stderr, " {}".format(i) - sys.exit(1) - - output_fname = parsed_args.pop('output') - if not output_fname: - output_fname = "" - - overwrite = parsed_args.pop('overwrite') - - # if parsed_args: - # raise InternalError( - # "Some command line parameters were not properly parsed: {}".format( - # parsed_args.keys() - # )) - - try: - func = self.get_export_plugins()[format] - except KeyError: - print >> sys.stderr, "Not implemented; implemented plugins are:" - print >> sys.stderr, "{}.".format( - ",".join(self.get_export_plugins())) - sys.exit(1) - - if not is_dbenv_loaded(): - load_dbenv() - - n = load_node(data_id) - - try: - if not isinstance(n, self.dataclass): - print >> 
sys.stderr, ("Node {} is of class {} instead " - "of {}".format(n, type(n), self.dataclass)) - sys.exit(1) - except AttributeError: - pass - - func(n, output_fname=output_fname, overwrite=overwrite, **parsed_args) - - def print_or_store(self, node, output_fname, fileformat, other_args={}, overwrite=False): - """ - Depending on the parameters, either print the (single) output file on screen, or - stores the file(s) on disk. - - :param node: the Data node to print or store on disk - :param output_fname: The filename to store the main file. If empty or None, print - instead - :param fileformat: a string to pass to the _exportstring method - :param other_args: a dictionary with additional kwargs to pass to _exportstring - :param overwrite: if False, stops if any file already exists (when output_fname - is not empty - - :note: this function calls directly sys.exit(1) when an error occurs (or e.g. if - check_overwrite is True and a file already exists). - """ - try: - if output_fname: - try: - node.export( - output_fname, fileformat=fileformat, overwrite=overwrite, **other_args) - except OSError as e: - print >> sys.stderr, "verdi: ERROR while exporting file:" - print >> sys.stderr, e.message - sys.exit(1) - else: - filetext, extra_files = node._exportstring( - fileformat, main_file_name=output_fname, **other_args) - if extra_files: - print >> sys.stderr, "This format requires to write more than one file." - print >> sys.stderr, "You need to pass the -o option to specify a file name." - sys.exit(1) - else: - print filetext - except TypeError as e: - # This typically occurs for parameters that are passed down to the - # methods in, e.g., BandsData, but they are not accepted - print >> sys.stderr, "verdi: ERROR, probably a parameter is not supported by the specific format." - print >> sys.stderr, "Error message: {}".format(e.message) - raise - - sys.exit(1) - - -class Importable(object): - """ - Provides shell completion for importable data nodes. - - .. note:: classes, inheriting Importable, MUST NOT contain attributes, - starting with ``_import_``, which are not plugins for importing. - """ - import_prefix = '_import_' - import_parameters_postfix = '_parameters' - - def get_import_plugins(self): - """ - Get the list of all implemented importers for data class. - """ - method_names = dir(self) # get list of class methods names - valid_formats = [i[len(self.import_prefix):] for i in method_names - if i.startswith(self.import_prefix) and \ - not i.endswith(self.import_parameters_postfix)] # filter - - return {k: getattr(self, self.import_prefix + k) for k in valid_formats} - - def importfile(self, *args): - import argparse, sys - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Import data object.') - parser.add_argument('--file', type=str, default=None, - help="Path of the imported file. 
Reads from " - "standard input if not specified.") - - default_format = None - try: - default_format = self._default_import_format - except AttributeError: - if len(self.get_import_plugins().keys()) == 1: - default_format = self.get_import_plugins().keys()[0] - else: - default_format = None - - parser.add_argument('--format', '-f', type=str, default=default_format, - help="Type of the imported file.", - choices=sorted(self.get_import_plugins().keys())) - - # Augmenting the command line parameters with ones, that are used by - # individual plugins - for cmd in dir(self): - if not cmd.startswith(self.import_prefix) or \ - not cmd.endswith(self.import_parameters_postfix): - continue - getattr(self, cmd)(parser) - - args = list(args) - parsed_args = vars(parser.parse_args(args)) - - format = parsed_args.pop('format') - filename = parsed_args.pop('file') - - if format is None: - print >> sys.stderr, ( - "Default format is not defined, please specify.\n" - "Valid formats are:") - for i in self.get_import_plugins().keys(): - print >> sys.stderr, " {}".format(i) - sys.exit(1) - - if not filename: - filename = "/dev/stdin" - - try: - func = self.get_import_plugins()[format] - except KeyError: - print >> sys.stderr, "Not implemented; implemented plugins are:" - print >> sys.stderr, "{}.".format( - ",".join(self.get_import_plugins())) - sys.exit(1) - - if not is_dbenv_loaded(): - load_dbenv() - - func(filename, **parsed_args) - - -class Depositable(object): - """ - Provides shell completion for depositable data nodes. - - .. note:: classes, inheriting Depositable, MUST NOT contain - attributes, starting with ``_deposit_``, which are not plugins for - depositing. - """ - deposit_prefix = '_deposit_' - deposit_parameters_postfix = '_parameters' - - def get_deposit_plugins(self): - """ - Get the list of all implemented deposition methods for data class. - """ - method_names = dir(self) # get list of class methods names - valid_formats = [i[len(self.deposit_prefix):] for i in method_names - if i.startswith(self.deposit_prefix) and \ - not i.endswith(self.deposit_parameters_postfix)] # filter - - return {k: getattr(self, self.deposit_prefix + k) for k in valid_formats} - - def deposit(self, *args): - """ - Deposit the data node to a given database. - - :param args: a namespace with parsed command line parameters. - """ - # DEVELOPER NOTE: to add a new plugin, just add a _deposit_xxx() method. 
- import argparse - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Deposit data object.') - parser.add_argument('data_id', type=int, default=None, - help="ID of the data object to be deposited.") - - default_database = None - try: - default_database = self._default_deposition_database - except AttributeError: - if len(self.get_deposit_plugins().keys()) == 1: - default_database = self.get_deposit_plugins().keys()[0] - else: - default_database = None - - parser.add_argument('--database', '-d', type=str, default=default_database, - help="Label of the database for deposition.", - choices=self.get_deposit_plugins().keys()) - - # Augmenting the command line parameters with ones, that are used by - # individual plugins - for cmd in dir(self): - if not cmd.startswith(self.deposit_prefix) or \ - not cmd.endswith(self.deposit_parameters_postfix): - continue - getattr(self, cmd)(parser) - - args = list(args) - parsed_args = vars(parser.parse_args(args)) - - database = parsed_args.pop('database') - data_id = parsed_args.pop('data_id') - - # Removing the keys, whose values are None - for key in parsed_args.keys(): - if parsed_args[key] is None: - parsed_args.pop(key) - - if database is None: - print >> sys.stderr, ( - "Default database is not defined, please specify.\n" - "Valid databases are:") - for i in self.get_deposit_plugins().keys(): - print >> sys.stderr, " {}".format(i) - sys.exit(1) - - try: - func = self.get_deposit_plugins()[database] - except KeyError: - print >> sys.stderr, "Not implemented; implemented plugins are:" - print >> sys.stderr, "{}.".format( - ",".join(self.get_deposit_plugins())) - sys.exit(1) - - if not is_dbenv_loaded(): - load_dbenv() - - n = load_node(data_id) - - try: - if not isinstance(n, self.dataclass): - print >> sys.stderr, ("Node {} is of class {} instead " - "of {}".format(n, type(n), self.dataclass)) - sys.exit(1) - except AttributeError: - pass - - calc = func(n, **parsed_args) - print calc - - -# Note: this class should not be exposed directly in the main module, -# otherwise it becomes a command of 'verdi'. Instead, we want it to be a -# subcommand of verdi data. -class _Upf(VerdiCommandWithSubcommands, Importable): - """ - Setup and manage upf to be used - - This command allows to list and configure upf. - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.upf import UpfData - - self.dataclass = UpfData - self.valid_subcommands = { - 'uploadfamily': (self.uploadfamily, self.complete_auto), - 'listfamilies': (self.listfamilies, self.complete_none), - 'import': (self.importfile, self.complete_none), - 'exportfamily': (self.exportfamily, self.complete_auto) - } - - def uploadfamily(self, *args): - """ - Upload a new pseudopotential family. - - Returns the numbers of files found and the number of nodes uploaded. - - Call without parameters to get some help. 
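Illustrative invocation, reconstructed from the argument handling below
(the folder, name and description are placeholders):

    verdi data upf uploadfamily <folder> <group_name> <group_description> [--stop-if-existing]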
- """ - import os.path - - if not len(args) == 3 and not len(args) == 4: - print >> sys.stderr, ("After 'upf uploadfamily' there should be three " - "arguments:") - print >> sys.stderr, ("folder, group_name, group_description " - "[OPTIONAL: --stop-if-existing]\n") - sys.exit(1) - - folder = os.path.abspath(args[0]) - group_name = args[1] - group_description = args[2] - stop_if_existing = False - - if len(args) == 4: - if args[3] == "--stop-if-existing": - stop_if_existing = True - else: - print >> sys.stderr, 'Unknown directive: ' + args[3] - sys.exit(1) - - if (not os.path.isdir(folder)): - print >> sys.stderr, 'Cannot find directory: ' + folder - sys.exit(1) - - import aiida.orm.data.upf as upf - - files_found, files_uploaded = upf.upload_upf_family(folder, group_name, - group_description, stop_if_existing) - - print "UPF files found: {}. New files uploaded: {}".format(files_found, files_uploaded) - - def listfamilies(self, *args): - """ - Print on screen the list of upf families installed - """ - # note that the following command requires that the upfdata has a - # key called element. As such, it is not well separated. - import argparse - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='List AiiDA upf families.') - parser.add_argument('-e', '--element', nargs='+', type=str, default=None, - help="Filter the families only to those containing " - "a pseudo for each of the specified elements") - parser.add_argument('-d', '--with-description', - dest='with_description', action='store_true', - help="Show also the description for the UPF family") - parser.set_defaults(with_description=False) - - args = list(args) - parsed_args = parser.parse_args(args) - - from aiida.orm import DataFactory - from aiida.orm.data.upf import UPFGROUP_TYPE - - UpfData = DataFactory('upf') - from aiida.orm.querybuilder import QueryBuilder - from aiida.orm.group import Group - qb = QueryBuilder() - qb.append(UpfData, tag='upfdata') - if parsed_args.element is not None: - qb.add_filter(UpfData, {'attributes.element': {'in': parsed_args.element}}) - qb.append( - Group, - group_of='upfdata', tag='group', - project=["name", "description"], - filters={"type": {'==': UPFGROUP_TYPE}} - ) - - qb.distinct() - if qb.count() > 0: - for res in qb.dict(): - group_name = res.get("group").get("name") - group_desc = res.get("group").get("description") - qb = QueryBuilder() - qb.append( - Group, - tag='thisgroup', - filters={"name": {'like': group_name}} - ) - qb.append( - UpfData, - project=["id"], - member_of='thisgroup' - ) - - if parsed_args.with_description: - description_string = ": {}".format(group_desc) - else: - description_string = "" - - print "* {} [{} pseudos]{}".format(group_name, qb.count(), - description_string) - - else: - print "No valid UPF pseudopotential family found." - - def exportfamily(self, *args): - """ - Export a pseudopotential family into a folder. - Call without parameters to get some help. 
- """ - import os - from aiida.common.exceptions import NotExistent - from aiida.orm import DataFactory - - if not len(args) == 2: - print >> sys.stderr, ("After 'upf export' there should be two " - "arguments:") - print >> sys.stderr, ("folder, upf_family_name\n") - sys.exit(1) - - folder = os.path.abspath(args[0]) - group_name = args[1] - - UpfData = DataFactory('upf') - try: - group = UpfData.get_upf_group(group_name) - except NotExistent: - print >> sys.stderr, ("upf family {} not found".format(group_name)) - sys.exit(1) - - for u in group.nodes: - dest_path = os.path.join(folder, u.filename) - if not os.path.isfile(dest_path): - with open(dest_path, 'w') as dest: - with u._get_folder_pathsubfolder.open(u.filename) as source: - dest.write(source.read()) - else: - print >> sys.stdout, ("File {} is already present in the " - "destination folder".format(u.filename)) - - def _import_upf(self, filename, **kwargs): - """ - Importer from UPF. - """ - try: - node, _ = self.dataclass.get_or_create(filename) - print node - except ValueError as e: - print e - - -class _Bands(VerdiCommandWithSubcommands, Listable, Visualizable, Exportable): - """ - Manipulation on the bands - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.array.bands import BandsData - - self.dataclass = BandsData - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - 'list': (self.list, self.complete_none), - 'export': (self.export, self.complete_none), - } - - def query(self, args): - """ - Perform the query and return information for the list. - - :param args: a namespace with parsed command line parameters. - :return: table (list of lists) with information, describing nodes. - Each row describes a single hit. - """ - from aiida.backends.utils import QueryFactory - if not is_dbenv_loaded(): - load_dbenv() - - q = QueryFactory()() - return q.get_bands_and_parents_structure(args) - - def append_list_cmdline_arguments(self, parser): - """ - Append additional command line parameters, that are later parsed and - used in the query construction. 
- - :param parser: instance of argparse.ArgumentParser - """ - parser.add_argument('-e', '--element', nargs='+', type=str, default=None, - help="Print all bandsdatas from structures " - "containing desired elements") - parser.add_argument('-eo', '--element-only', nargs='+', type=str, default=None, - help="Print all bandsdatas from structures " - "containing only the selected elements") - parser.add_argument('-f', '--formulamode', metavar='FORMULA_MODE', - type=str, default='hill', - help="Formula printing mode (hill, hill_compact," - " reduce, group, count, or count_compact)" - " (if None, does not print the formula)", - action='store') - parser.add_argument('-p', '--past-days', metavar='N', - help="Add a filter to show only bandsdatas created in the past N days", - type=int, action='store') - parser.add_argument('-g', '--group-name', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=str, action='store') - parser.add_argument('-G', '--group-pk', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=int, action='store') - parser.add_argument('-A', '--all-users', action='store_true', default=False, - help="show groups for all users, rather than only for the" - "current user") - - def append_export_cmdline_arguments(self, parser): - """ - Additional command line arguments for the 'export' command - - :param parser: instance of argparse.ArgumentParser - """ - from aiida.common.utils import Prettifier - - parser.add_argument('--prettify-format', type=str, default=None, - choices=Prettifier.get_prettifiers(), - help='The style of labels for the prettifier') - parser.add_argument('--y-min-lim', type=float, default=None, - help='The minimum value for the y axis. Default: minimum of all bands') - parser.add_argument('--y-max-lim', type=float, default=None, - help='The maximum value for the y axis. Default: maximum of all bands') - - def get_column_names(self): - """ - Return the list with column names. - - :note: neither the number nor correspondence of column names and - actual columns in the output from the query() are checked. - """ - return ["ID", "formula", "ctime", "label"] - - def _export_agr(self, node, output_fname, overwrite, **kwargs): - """ - Export a .agr file, to be visualized with the XMGrace plotting software. - """ - self.print_or_store(node, output_fname, fileformat='agr', overwrite=overwrite, - other_args=kwargs) - - def _export_agr_batch(self, node, output_fname, overwrite, **kwargs): - """ - Export a .agr batch file, to be visualized with the XMGrace plotting software. - """ - self.print_or_store(node, output_fname, fileformat='agr_batch', overwrite=overwrite, - other_args=kwargs) - - def _export_gnuplot(self, node, output_fname, overwrite, **kwargs): - """ - Export a Gnuplot file, together with the corresponding .dat file, - to be visualized with the Gnuplot plotting software. - - Run with 'gnuplot -p filename' to see the plot (and keep the window with - the plot open). - """ - self.print_or_store(node, output_fname, fileformat='gnuplot', overwrite=overwrite, - other_args=kwargs) - - def _export_dat_multicolumn(self, node, output_fname, overwrite, **kwargs): - """ - Export a .dat file with one line per kpoint, with multiple energy values - on the same line separated by spaces. 
- """ - self.print_or_store(node, output_fname, fileformat='dat_multicolumn', overwrite=overwrite, - other_args=kwargs) - - def _export_dat_blocks(self, node, output_fname, overwrite, **kwargs): - """ - Export a .dat file with one line per datapoint (kpt, energy), - with multiple bands separated in stanzas (i.e. having at least an empty - newline inbetween). - """ - self.print_or_store(node, output_fname, fileformat='dat_blocks', overwrite=overwrite, - other_args=kwargs) - - def _export_json(self, node, output_fname, overwrite, **kwargs): - """ - Export a .dat file with one line per datapoint (kpt, energy), - with multiple bands separated in stanzas (i.e. having at least an empty - newline inbetween). - """ - self.print_or_store(node, output_fname, fileformat='json', overwrite=overwrite, - other_args=kwargs) - - def _export_mpl_singlefile(self, node, output_fname, overwrite, **kwargs): - """ - Export a .py file that would produce the plot using matplotlib - when run with python (with data dumped within the same python file) - """ - self.print_or_store(node, output_fname, fileformat='mpl_singlefile', overwrite=overwrite, - other_args=kwargs) - - def _export_mpl_withjson(self, node, output_fname, overwrite, **kwargs): - """ - Export a .py file that would produce the plot using matplotlib - when run with python (with data dumped in an external json filee) - """ - self.print_or_store(node, output_fname, fileformat='mpl_withjson', overwrite=overwrite, - other_args=kwargs) - - def _export_mpl_png(self, node, output_fname, overwrite, **kwargs): - """ - Export a .png file generated using matplotlib - """ - if not output_fname: - print >> sys.stderr, "To export to PNG please always specify the filename with the -o option" - sys.exit(1) - self.print_or_store(node, output_fname, fileformat='mpl_png', overwrite=overwrite, - other_args=kwargs) - - def _export_mpl_pdf(self, node, output_fname, overwrite, **kwargs): - """ - Export a .pdf file generated using matplotlib - """ - if not output_fname: - print >> sys.stderr, "To export to PDF please always specify the filename with the -o option" - sys.exit(1) - self.print_or_store(node, output_fname, fileformat='mpl_pdf', overwrite=overwrite, - other_args=kwargs) - - def _show_xmgrace(self, exec_name, list_bands): - """ - Plugin for show the bands with the XMGrace plotting software. - """ - import tempfile, subprocess, numpy - from aiida.orm.data.array.bands import max_num_agr_colors - - list_files = [] - current_band_number = 0 - for iband, bands in enumerate(list_bands): - # extract number of bands - nbnds = bands.get_bands().shape[1] - text, _ = bands._exportstring('agr', setnumber_offset=current_band_number, - color_number=numpy.mod(iband + 1, max_num_agr_colors)) - # write a tempfile - f = tempfile.NamedTemporaryFile(suffix='.agr') - f.write(text) - f.flush() - list_files.append(f) - # update the number of bands already plotted - current_band_number += nbnds - - try: - subprocess.check_output([exec_name] + [f.name for f in list_files]) - _ = [f.close() for f in list_files] - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - _ = [f.close() for f in list_files] - except OSError as e: - _ = [f.close() for f in list_files] - if e.errno == 2: - print ("No executable '{}' found. 
Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - -class _Structure(VerdiCommandWithSubcommands, - Listable, - Visualizable, - Exportable, - Importable, - Depositable): - """ - Visualize AiIDA structures - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.structure import StructureData - - super(_Structure, self).__init__() - - self.dataclass = StructureData - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - 'list': (self.list, self.complete_none), - 'export': (self.export, self.complete_none), - 'deposit': (self.deposit, self.complete_none), - 'import': (self.importfile, self.complete_none), - } - - def query(self, args): - """ - Perform the query - """ - if not is_dbenv_loaded(): - load_dbenv() - - from aiida.orm.querybuilder import QueryBuilder - from aiida.orm.data.structure import StructureData - from aiida.orm.implementation import Group - from aiida.orm.data.structure import (get_formula, get_symbols_string) - from aiida.orm.user import User - - qb = QueryBuilder() - if args.all_users is False: - user = self.backend.users.get_automatic_user() - qb.append(User, tag="creator", filters={"email": user.email}) - else: - qb.append(User, tag="creator") - - st_data_filters = {} - self.query_past_days(st_data_filters, args) - qb.append(StructureData, tag="struc", created_by="creator", - filters=st_data_filters, - project=["id", "label", "attributes.kinds", - "attributes.sites"]) - - group_filters = {} - self.query_group(group_filters, args) - if group_filters: - qb.append(Group, tag="group", filters=group_filters, - group_of="struc") - - struc_list_data = qb.distinct() - - entry_list = [] - if struc_list_data.count() > 0: - for [id, label, akinds, asites] in struc_list_data.all(): - - # If symbols are defined there is a filtering of the structures - # based on the element - # When QueryBuilder will support this (attribute)s filtering, - # it will be pushed in the query. 
- if args.element is not None: - all_symbols = [_["symbols"][0] for _ in akinds] - if not any([s in args.element for s in all_symbols] - ): - continue - - if args.elementonly: - print "Not implemented elementonly search" - sys.exit(1) - - # We want only the StructureData that have attributes - if akinds is None or asites is None: - continue - - symbol_dict = {} - for k in akinds: - symbols = k['symbols'] - weights = k['weights'] - symbol_dict[k['name']] = get_symbols_string(symbols, - weights) - - try: - symbol_list = [] - for s in asites: - symbol_list.append(symbol_dict[s['kind_name']]) - formula = get_formula(symbol_list, - mode=args.formulamode) - # If for some reason there is no kind with the name - # referenced by the site - except KeyError: - formula = "<>" - entry_list.append([str(id), str(formula), label]) - - return entry_list - - def append_list_cmdline_arguments(self, parser): - parser.add_argument('-e', '--element', nargs='+', type=str, default=None, - help="Print all structures containing desired elements") - parser.add_argument('-eo', '--elementonly', action='store_true', - help="If set, structures do not contain different " - "elements (to be used with -e option)") - parser.add_argument('-f', '--formulamode', metavar='FORMULA_MODE', - type=str, default='hill', - help="Formula printing mode (hill, hill_compact," - " reduce, group, count, or count_compact)" - " (if None, does not print the formula)", - action='store') - parser.add_argument('-p', '--past-days', metavar='N', - help="Add a filter to show only structures created in the past N days", - type=int, action='store') - parser.add_argument('-g', '--group-name', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=str, action='store') - parser.add_argument('-G', '--group-pk', metavar='N', nargs="+", default=None, - help="add a filter to show only objects belonging to groups", - type=int, action='store') - parser.add_argument('-A', '--all-users', action='store_true', default=False, - help="show groups for all users, rather than only for the" - "current user") - - def get_column_names(self): - return ["ID", "formula", "label"] - - def _show_xcrysden(self, exec_name, structure_list): - """ - Plugin for xcrysden - """ - import tempfile, subprocess - - if len(structure_list) > 1: - raise MultipleObjectsError("Visualization of multiple objects " - "is not implemented") - structure = structure_list[0] - - with tempfile.NamedTemporaryFile(suffix='.xsf') as f: - f.write(structure._exportstring('xsf')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, '--xsf', f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _show_ase(self, exec_name, structure_list): - """ - Plugin to show the structure with the ASE visualizer - """ - try: - from ase.visualize import view - for structure in structure_list: - view(structure.get_ase()) - except ImportError: - raise - - def _show_vesta(self, exec_name, structure_list): - """ - Plugin for VESTA - This VESTA plugin was added by Yue-Wen FANG and Abel Carreras - at Kyoto University in the group of Prof. 
Isao Tanaka's lab - - """ - import tempfile, subprocess - - with tempfile.NamedTemporaryFile(suffix='.cif') as f: - for structure in structure_list: - f.write(structure._exportstring('cif')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format(exec_name)) - sys.exit(1) - else: - raise - - def _show_vmd(self, exec_name, structure_list): - """ - Plugin for vmd - """ - import tempfile, subprocess - - if len(structure_list) > 1: - raise MultipleObjectsError("Visualization of multiple objects " - "is not implemented") - structure = structure_list[0] - - with tempfile.NamedTemporaryFile(suffix='.xsf') as f: - f.write(structure._exportstring('xsf')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _show_jmol(self, exec_name, structure_list): - """ - Plugin for jmol - """ - import tempfile, subprocess - - with tempfile.NamedTemporaryFile() as f: - for structure in structure_list: - f.write(structure._exportstring('cif')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _export_tcod(self, node, output_fname, overwrite, parameter_data=None, **kwargs): - """ - Plugin for TCOD - """ - - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - self.print_or_store(node, output_fname, fileformat='tcod', overwrite=overwrite, - other_args=kwargs) - - def _export_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters for TCOD - """ - from aiida.tools.dbexporters.tcod import extend_with_cmdline_parameters - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - - def _export_xsf(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to XSF. - """ - self.print_or_store(node, output_fname, fileformat='xsf', overwrite=overwrite, - other_args=kwargs) - - def _export_cif(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to CIF. - """ - self.print_or_store(node, output_fname, fileformat='cif', overwrite=overwrite, - other_args=kwargs) - - def _export_xyz(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to XYZ. 
- """ - self.print_or_store(node, output_fname, fileformat='xyz', overwrite=overwrite, - other_args=kwargs) - - def _import_xyz_parameters(self, parser): - """ - Adding some functionality to the parser to deal with importing files - """ - # In order to deal with structures that do not have a cell defined: - # We can increase the size of the cell from the minimal cell - # The minimal cell is the cell the just accomodates the structure given, - # defined by the minimum and maximum of position in each dimension - parser.add_argument('--vacuum-factor', type=float, default=1.0, - help='The factor by which the cell accomodating the structure should be increased, default: 1.0') - # To that increased cell, we can also add a "safety margin" - parser.add_argument('--vacuum-addition', type=float, default=10.0, - help='The distance to add to the unit cell after vacuum-factor was applied to expand in each dimension, default: 10.0') - parser.add_argument('--pbc', type=int, nargs=3, default=[0, 0, 0], - help=""" - Set periodic boundary conditions for each lattice direction, - 0 for no periodicity, any other integer for periodicity""") - parser.add_argument('--view', action='store_true', default=False, help='View resulting structure using ASE') - parser.add_argument('--dont-store', action='store_true', default=False, help='Do not store the structure') - - def _import_xyz(self, filename, **kwargs): - """ - Imports an XYZ-file. - """ - from os.path import abspath - vacuum_addition = kwargs.pop('vacuum_addition') - vacuum_factor = kwargs.pop('vacuum_factor') - pbc = [bool(i) for i in kwargs.pop('pbc')] - dont_store = kwargs.pop('dont_store') - view_in_ase = kwargs.pop('view') - - print 'importing XYZ-structure from: \n {}'.format(abspath(filename)) - filepath = abspath(filename) - with open(filepath) as f: - xyz_txt = f.read() - new_structure = self.dataclass() - try: - new_structure._parse_xyz(xyz_txt) - new_structure._adjust_default_cell(vacuum_addition=vacuum_addition, - vacuum_factor=vacuum_factor, - pbc=pbc) - - if not dont_store: - new_structure.store() - if view_in_ase: - from ase.visualize import view - view(new_structure.get_ase()) - print ( - ' Succesfully imported structure {}, ' - '(PK = {})'.format(new_structure.get_formula(), new_structure.pk) - ) - - except ValueError as e: - print e - sys.exit(1) - - def _import_pwi(self, filename, **kwargs): - """ - Imports a structure from a quantumespresso input file. - """ - from os.path import abspath - try: - from qe_tools.parsers.pwinputparser import PwInputFile - except ImportError: - print ("You have not installed the package qe-tools. \n" - "You can install it with: pip install qe-tools") - sys.exit(1) - - dont_store = kwargs.pop('dont_store') - view_in_ase = kwargs.pop('view') - - print 'importing structure from: \n {}'.format(abspath(filename)) - filepath = abspath(filename) - - try: - inputparser = PwInputFile(filepath) - new_structure = inputparser.get_structuredata() - - if not dont_store: - new_structure.store() - if view_in_ase: - from ase.visualize import view - view(new_structure.get_ase()) - print ( - ' Succesfully imported structure {}, ' - '(PK = {})'.format(new_structure.get_formula(), new_structure.pk) - ) - - except ValueError as e: - print e - sys.exit(1) - - def _import_ase(self, filename, **kwargs): - """ - Imports a structure in a number of formats using the ASE routines. 
- """ - from os.path import abspath - from aiida.orm.data.structure import StructureData - - try: - import ase.io - except ImportError: - print ("You have not installed the package ase. \n" - "You can install it with: pip install ase") - sys.exit(1) - - dont_store = kwargs.pop('dont_store') - view_in_ase = kwargs.pop('view') - - print 'importing structure from: \n {}'.format(abspath(filename)) - filepath = abspath(filename) - - try: - asecell = ase.io.read(filepath) - new_structure = StructureData(ase=asecell) - - if not dont_store: - new_structure.store() - if view_in_ase: - from ase.visualize import view - view(new_structure.get_ase()) - print ( - ' Succesfully imported structure {}, ' - '(PK = {})'.format(new_structure.get_formula(), new_structure.pk) - ) - - except ValueError as e: - print e - sys.exit(1) - - def _deposit_tcod(self, node, parameter_data=None, **kwargs): - """ - Deposition plugin for TCOD. - """ - from aiida.tools.dbexporters.tcod import deposit - - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - return deposit(node, parameters=parameters, **kwargs) - - def _deposit_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters deposition plugin for TCOD. - """ - from aiida.tools.dbexporters.tcod import (deposition_cmdline_parameters, - extend_with_cmdline_parameters) - deposition_cmdline_parameters(parser, self.dataclass.__name__) - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - - -class _Cif(VerdiCommandWithSubcommands, - Listable, Visualizable, Exportable, Importable, Depositable): - """ - Visualize CIF structures - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.cif import CifData - super(_Cif, self).__init__() - - self.dataclass = CifData - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - 'list': (self.list, self.complete_none), - 'export': (self.export, self.complete_none), - 'import': (self.importfile, self.complete_none), - 'deposit': (self.deposit, self.complete_none), - } - - def _show_jmol(self, exec_name, structure_list): - """ - Plugin for jmol - """ - import tempfile, subprocess - - with tempfile.NamedTemporaryFile() as f: - for structure in structure_list: - f.write(structure._exportstring('cif')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _show_vesta(self, exec_name, structure_list): - """ - Plugin for VESTA, added by Yue-Wen FANG and Abel Carreras - at Kyoto University in the group of Prof. 
Isao Tanaka's lab - """ - import tempfile, subprocess - - with tempfile.NamedTemporaryFile(suffix='.cif') as f: - for structure in structure_list: - f.write(structure._exportstring('cif')[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format(exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format(exec_name)) - sys.exit(1) - else: - raise - - def query(self, args): - """ - Perform the query and return information for the list. - - :param args: a namespace with parsed command line parameters. - :return: table (list of lists) with information, describing nodes. - Each row describes a single hit. - """ - if not is_dbenv_loaded(): - load_dbenv() - - from aiida.orm.querybuilder import QueryBuilder - from aiida.orm.implementation import Group - from aiida.orm.user import User - - qb = QueryBuilder() - if args.all_users is False: - user = self.backend.users.get_automatic_user() - qb.append(User, tag="creator", filters={"email": user.email}) - else: - qb.append(User, tag="creator") - - st_data_filters = {} - self.query_past_days(st_data_filters, args) - qb.append(self.dataclass, tag="struc", created_by="creator", - filters=st_data_filters, - project=["*"]) - - group_filters = {} - self.query_group(group_filters, args) - if group_filters: - qb.append(Group, tag="group", filters=group_filters, - group_of="struc") - - qb.order_by({self.dataclass: {'ctime': 'asc'}}) - res = qb.distinct() - - entry_list = [] - if res.count() > 0: - for [obj] in res.iterall(): - formulae = '?' - try: - formulae = ",".join(obj.get_attr('formulae')) - except AttributeError: - pass - except TypeError: - pass - source_uri = '?' - try: - source_uri = obj.get_attr('source')['uri'] - except AttributeError: - pass - except KeyError: - pass - entry_list.append([str(obj.pk), formulae, source_uri]) - return entry_list - - def get_column_names(self): - """ - Return the list with column names. - - :note: neither the number nor correspondence of column names and - actual columns in the output from the query() are checked. - """ - return ["ID", "formulae", "source_uri"] - - def _export_cif(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to CIF. - """ - self.print_or_store(node, output_fname, fileformat='cif', overwrite=overwrite, - other_args=kwargs) - - def _export_tcod(self, node, output_fname, overwrite, parameter_data=None, **kwargs): - """ - Plugin for TCOD - """ - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - self.print_or_store(node, output_fname, fileformat='tcod', overwrite=overwrite, - other_args=kwargs) - - def _export_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters for TCOD - """ - from aiida.tools.dbexporters.tcod import extend_with_cmdline_parameters - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - - def _import_cif(self, filename, **kwargs): - """ - Importer from CIF. - """ - import os.path - - try: - node, _ = self.dataclass.get_or_create(os.path.abspath(filename)) - print node - except ValueError as e: - print e - - def _deposit_tcod(self, node, parameter_data=None, **kwargs): - """ - Deposition plugin for TCOD. 
- """ - from aiida.tools.dbexporters.tcod import deposit - - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - return deposit(node, parameters=parameters, **kwargs) - - def _deposit_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters deposition plugin for TCOD. - """ - from aiida.tools.dbexporters.tcod import (deposition_cmdline_parameters, - extend_with_cmdline_parameters) - deposition_cmdline_parameters(parser, self.dataclass.__name__) - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - - -class _Trajectory(VerdiCommandWithSubcommands, - Listable, Visualizable, Exportable, Depositable): - """ - View and manipulate TrajectoryData instances. - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.array.trajectory import TrajectoryData - - self.dataclass = TrajectoryData - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - 'list': (self.list, self.complete_none), - 'export': (self.export, self.complete_none), - 'deposit': (self.deposit, self.complete_none), - } - - def _show_jmol(self, exec_name, trajectory_list, **kwargs): - """ - Plugin for jmol - """ - import tempfile, subprocess - - with tempfile.NamedTemporaryFile() as f: - for trajectory in trajectory_list: - f.write(trajectory._exportstring('cif', **kwargs)[0]) - f.flush() - - try: - subprocess.check_output([exec_name, f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _show_jmol_parameters(self, parser): - """ - Describe command line parameters. - """ - parser.add_argument('--step', - help="ID of the trajectory step. If none is " - "supplied, all steps are exported.", - type=int, action='store') - - def _show_xcrysden(self, exec_name, trajectory_list, **kwargs): - """ - Plugin for xcrysden - """ - import tempfile, subprocess - - if len(trajectory_list) > 1: - raise MultipleObjectsError("Visualization of multiple trajectories " - "is not implemented") - trajectory = trajectory_list[0] - - with tempfile.NamedTemporaryFile(suffix='.xsf') as f: - f.write(trajectory._exportstring('xsf', **kwargs)[0]) - f.flush() - - try: - subprocess.check_output([exec_name, '--xsf', f.name]) - except subprocess.CalledProcessError: - # The program died: just print a message - print "Note: the call to {} ended with an error.".format( - exec_name) - except OSError as e: - if e.errno == 2: - print ("No executable '{}' found. 
Add to the path, " - "or try with an absolute path.".format( - exec_name)) - sys.exit(1) - else: - raise - - def _show_mpl_pos_parameters(self, parser): - """ - Describe command line parameters for _show_pos - """ - parser.add_argument('-s', '--stepsize', - type=int, - help='' - 'The stepsize for the trajectory, set it higher to reduce ' - 'number of points', - default=1 - ) - parser.add_argument('--mintime', - type=int, default=None, - help='The time to plot from' - ) - parser.add_argument('--maxtime', - type=int, default=None, - help='The time to plot to' - ) - parser.add_argument('-e', '--elements', - type=str, nargs='+', - help='Show only atoms of that species' - ) - parser.add_argument('-i', '--indices', - type=int, nargs='+', - help='Show only these indices' - ) - parser.add_argument('--dont-block', - action='store_true', - help="Don't block interpreter when showing plot" - ) - - def _show_mpl_heatmap_parameters(self, parser): - """ - Describe command line parameters for _show_mpl_heatmap - """ - parser.add_argument('-c', '--contours', - type=float, nargs='+', - help='Isovalues to plot' - ) - parser.add_argument('--sampling-stepsize', - type=int, - help='Sample positions in plot every sampling_stepsize timestep' - ) - - def _show_mpl_pos(self, exec_name, trajectory_list, **kwargs): - """ - Produces a matplotlib plot of the trajectory - """ - for t in trajectory_list: - t.show_mpl_pos(**kwargs) - - def _show_mpl_heatmap(self, exec_name, trajectory_list, **kwargs): - """ - Produces a matplotlib plot of the trajectory - """ - for t in trajectory_list: - t.show_mpl_heatmap(**kwargs) - - def _export_xsf(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to XSF. - """ - self.print_or_store(node, output_fname, fileformat='xsf', overwrite=overwrite, - other_args=kwargs) - - def _export_tcod(self, node, output_fname, overwrite, parameter_data=None, **kwargs): - """ - Plugin for TCOD - """ - - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - self.print_or_store(node, output_fname, fileformat='tcod', overwrite=overwrite, - other_args=kwargs) - - def _export_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters for TCOD - """ - from aiida.tools.dbexporters.tcod import extend_with_cmdline_parameters - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - - def _export_cif(self, node, output_fname, overwrite, **kwargs): - """ - Exporter to CIF. - """ - self.print_or_store(node, output_fname, fileformat='cif', overwrite=overwrite, - other_args=kwargs) - - def _export_cif_parameters(self, parser, **kwargs): - """ - Describe command line parameters. - """ - parser.add_argument('--step', dest='trajectory_index', - help="ID of the trajectory step. If none is " - "supplied, all steps are exported.", - type=int, action='store') - - def _deposit_tcod(self, node, parameter_data=None, **kwargs): - """ - Deposition plugin for TCOD. - """ - from aiida.tools.dbexporters.tcod import deposit - - parameters = None - if parameter_data is not None: - from aiida.orm import DataFactory - ParameterData = DataFactory('parameter') - parameters = load_node(parameter_data, sub_class=ParameterData) - return deposit(node, parameters=parameters, **kwargs) - - def _deposit_tcod_parameters(self, parser, **kwargs): - """ - Command line parameters deposition plugin for TCOD. 
- """ - from aiida.tools.dbexporters.tcod import (deposition_cmdline_parameters, - extend_with_cmdline_parameters) - deposition_cmdline_parameters(parser, self.dataclass.__name__) - extend_with_cmdline_parameters(parser, self.dataclass.__name__) - self._export_cif_parameters(parser) - - -class _Parameter(VerdiCommandWithSubcommands, Visualizable): - """ - View and manipulate Parameter data classes. - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - from aiida.orm.data.parameter import ParameterData - - self.dataclass = ParameterData - self._default_show_format = 'json_date' - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - } - - def _show_json_date(self, exec_name, node_list): - """ - Show contents of ParameterData nodes. - """ - from aiida.cmdline import print_dictionary - - for node in node_list: - the_dict = node.get_dict() - print_dictionary(the_dict, 'json+date') - - -class _Array(VerdiCommandWithSubcommands, Visualizable): - """ - View and manipulate Array data classes. - """ - - def __init__(self): - """ - A dictionary with valid commands and functions to be called. - """ - if not is_dbenv_loaded(): - load_dbenv() - - from aiida.orm.data.array import ArrayData - - self.dataclass = ArrayData - self._default_show_format = 'json_date' - self.valid_subcommands = { - 'show': (self.show, self.complete_none), - } - - def _show_json_date(self, exec_name, node_list): - """ - Show contents of ArrayData nodes. - """ - from aiida.cmdline import print_dictionary - - for node in node_list: - the_dict = {} - for arrayname in node.arraynames(): - the_dict[arrayname] = node.get_array(arrayname).tolist() - print_dictionary(the_dict, 'json+date') - - -class _Remote(VerdiCommandWithSubcommands): - """ - Manage RemoteData objects - """ - - def __init__(self): - self.valid_subcommands = { - 'ls': (self.do_listdir, self.complete_none), - 'cat': (self.do_cat, self.complete_none), - 'show': (self.do_show, self.complete_none), - } - - def do_listdir(self, *args): - """ - List directory content on remote RemoteData objects. 
- """ - import argparse - import datetime - from aiida.backends.utils import load_dbenv, is_dbenv_loaded - from aiida.common.utils import get_mode_string - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='List directory content on remote RemoteData objects.') - - parser.add_argument('-l', '--long', action='store_true', - help="Display also file metadata") - parser.add_argument('pk', type=int, help="PK of the node") - parser.add_argument('path', nargs='?', default='.', help="The folder to list") - - args = list(args) - parsed_args = parser.parse_args(args) - - if not is_dbenv_loaded(): - load_dbenv() - - try: - n = load_node(parsed_args.pk) - except Exception as e: - click.echo(e.message, err=True) - sys.exit(1) - try: - content = n.listdir_withattributes(path=parsed_args.path) - except (IOError, OSError) as e: - click.echo("Unable to access the remote folder or file, check if it exists.", err=True) - click.echo("Original error: {}".format(str(e)), err=True) - sys.exit(1) - for metadata in content: - if parsed_args.long: - mtime = datetime.datetime.fromtimestamp( - metadata['attributes'].st_mtime) - pre_line = '{} {:10} {} '.format( - get_mode_string(metadata['attributes'].st_mode), - metadata['attributes'].st_size, - mtime.strftime("%d %b %Y %H:%M") - ) - click.echo(pre_line, nl=False) - if metadata['isdir']: - click.echo(click.style(metadata['name'], fg='blue')) - else: - click.echo(metadata['name']) - - def do_cat(self, *args): - """ - Show the content of remote files in RemoteData objects. - """ - # Note: the implementation is not very efficient: if first downloads the full file on a file on the disk, - # then prints it and finally deletes the file. - # TODO: change it to open the file and stream it; it requires to add an openfile() method to the transport - import argparse - import datetime - from aiida.backends.utils import load_dbenv, is_dbenv_loaded - import tempfile - import os - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Show the content of remote files in RemoteData objects.') - - parser.add_argument('pk', type=int, help="PK of the node") - parser.add_argument('path', type=str, help="The (relative) path to the file to show") - - args = list(args) - parsed_args = parser.parse_args(args) - - if not is_dbenv_loaded(): - load_dbenv() - - try: - n = load_node(parsed_args.pk) - except Exception as e: - click.echo(e.message, err=True) - sys.exit(1) - - try: - with tempfile.NamedTemporaryFile(delete=False) as f: - f.close() - n.getfile(parsed_args.path, f.name) - with open(f.name) as fobj: - sys.stdout.write(fobj.read()) - except IOError as e: - click.echo("ERROR {}: {}".format(e.errno, str(e)), err=True) - sys.exit(1) - - try: - os.remove(f.name) - except OSError: - # If you cannot delete, ignore (maybe I didn't manage to create it in the first place - pass - - def do_show(self, *args): - """ - Show information on a RemoteData object. 
- """ - import argparse - import datetime - from aiida.backends.utils import load_dbenv, is_dbenv_loaded - import tempfile - import os - - parser = argparse.ArgumentParser( - prog=self.get_full_command_name(), - description='Show information on a RemoteData object.') - - parser.add_argument('pk', type=int, help="PK of the node") - - args = list(args) - parsed_args = parser.parse_args(args) - - if not is_dbenv_loaded(): - load_dbenv() - - try: - n = load_node(parsed_args.pk) - except Exception as e: - click.echo(e.message, err=True) - sys.exit(1) - - click.echo("- Remote computer name:") - click.echo(" {}".format(n.get_computer_name())) - click.echo("- Remote folder full path:") - click.echo(" {}".format(n.get_remote_path())) diff --git a/aiida/cmdline/commands/data/__init__.py b/aiida/cmdline/commands/data/__init__.py new file mode 100644 index 0000000000..7c2e6e658b --- /dev/null +++ b/aiida/cmdline/commands/data/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +from aiida.cmdline.baseclass import VerdiCommandWithSubcommands +from aiida.cmdline.commands import verdi, verdi_data + + +class Data(VerdiCommandWithSubcommands): + """ + Setup and manage data specific types + + There is a list of subcommands for managing specific types of data. + For instance, 'data upf' manages pseudopotentials in the UPF format. + """ + + def __init__(self): + from aiida.backends.utils import load_dbenv, is_dbenv_loaded + if not is_dbenv_loaded(): + load_dbenv() + + from aiida.cmdline.commands.data import upf + from aiida.cmdline.commands.data import structure + from aiida.cmdline.commands.data import bands + from aiida.cmdline.commands.data import cif + from aiida.cmdline.commands.data import trajectory + from aiida.cmdline.commands.data import parameter + from aiida.cmdline.commands.data import array + from aiida.cmdline.commands.data import remote + + self.valid_subcommands = { + 'upf': (self.cli, self.complete_none), + 'structure': (self.cli, self.complete_none), + 'bands': (self.cli, self.complete_none), + 'cif': (self.cli, self.complete_none), + 'trajectory': (self.cli, self.complete_none), + 'parameter': (self.cli, self.complete_none), + 'array': (self.cli, self.complete_none), + 'remote': (self.cli, self.complete_none), + } + + def cli(self, *args): + verdi() diff --git a/aiida/cmdline/commands/data/array.py b/aiida/cmdline/commands/data/array.py new file mode 100644 index 0000000000..5c68e5cba9 --- /dev/null +++ b/aiida/cmdline/commands/data/array.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage ArrayData objects from command line. 
+""" +import click + +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params import arguments +from aiida.cmdline.utils import echo + + +# pylint: disable=unused-argument +@verdi_data.group('array') +@click.pass_context +def array(ctx): + """ + Manipulate ArrayData objects + """ + pass + + +@array.command('show') +@arguments.NODES() +def show(nodes): + """ + Visualize array object + """ + from aiida.orm.data.array import ArrayData + from aiida.cmdline import print_dictionary + for node in nodes: + if not isinstance(node, ArrayData): + echo.echo_critical("Node {} is of class {} instead of" " {}".format(node, type(node), ArrayData)) + the_dict = {} + for arrayname in node.arraynames(): + the_dict[arrayname] = node.get_array(arrayname).tolist() + print_dictionary(the_dict, 'json+date') diff --git a/aiida/cmdline/commands/data/bands.py b/aiida/cmdline/commands/data/bands.py new file mode 100644 index 0000000000..8e1a917d90 --- /dev/null +++ b/aiida/cmdline/commands/data/bands.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage BandsData objects from command line. +""" +from __future__ import print_function +import sys +import click +from aiida.cmdline.commands.data.list import list_options +from aiida.cmdline.commands.data.export import _export +from aiida.cmdline.params.options.multivalue import MultipleValueOption +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params import arguments +from aiida.cmdline.params import options +from aiida.cmdline.utils import echo +from aiida.orm.data.array.bands import BandsData +from aiida.common.utils import Prettifier + + +def show_xmgrace(exec_name, list_bands): + """ + Plugin for showing the bands with the XMGrace plotting software. + """ + import tempfile + import subprocess + import numpy + from aiida.orm.data.array.bands import max_num_agr_colors + + list_files = [] + current_band_number = 0 + for iband, bnds in enumerate(list_bands): + # extract number of bands + nbnds = bnds.get_bands().shape[1] + # pylint: disable=protected-access + text, _ = bnds._exportstring( + 'agr', setnumber_offset=current_band_number, color_number=numpy.mod(iband + 1, max_num_agr_colors)) + # write a tempfile + tempf = tempfile.NamedTemporaryFile(suffix='.agr') + tempf.write(text) + tempf.flush() + list_files.append(tempf) + # update the number of bands already plotted + current_band_number += nbnds + + try: + subprocess.check_output([exec_name] + [f.name for f in list_files]) + _ = [f.close() for f in list_files] + except subprocess.CalledProcessError: + print("Note: the call to {} ended with an error.".format(exec_name)) + _ = [f.close() for f in list_files] + except OSError as err: + _ = [f.close() for f in list_files] + if err.errno == 2: + print("No executable '{}' found. 
Add to the path," " or try with an absolute path.".format(exec_name)) + sys.exit(1) + else: + raise + + +# pylint: disable=unused-argument +@verdi_data.group('bands') +@click.pass_context +def bands(ctx): + """ + Manipulate BandsData objects + """ + pass + + +@bands.command('show') +@arguments.NODES() +@click.option( + '-f', + '--format', + 'show_format', + type=click.Choice(['xmgrace']), + default='xmgrace', + help="Type of the visualization format/tool.") +def show(nodes, show_format): + """ + Visualize BandsData objects + """ + for node in nodes: + if not isinstance(node, BandsData): + echo.echo_critical("Node {} is of class {} instead " "of {}".format(node, type(node), BandsData)) + show_xmgrace(show_format, nodes) + + +PROJECT_HEADERS = ['ID', 'Formula', 'Ctime', 'Label'] + + +# pylint: disable=too-many-arguments +@bands.command('list') +@list_options +@click.option( + '-e', + '--elements', + type=click.STRING, + cls=MultipleValueOption, + default=None, + help="Print only the objects that" + " contain desired elements") +@click.option( + '-eo', + '--elements-only', + type=click.STRING, + cls=MultipleValueOption, + default=None, + help="Print only the objects that" + " contain only the selected elements") +@click.option( + '-f', + '--formulamode', + type=click.Choice(['hill', 'hill_compact', 'reduce', 'group', 'count', 'count_compact']), + default='hill', + help="Formula printing mode (if None, does not print the formula)") +def bands_list(elements, elements_only, raw, formulamode, past_days, groups, all_users): + """ + List stored BandsData objects + """ + from aiida.backends.utils import QueryFactory + from tabulate import tabulate + from argparse import Namespace + + args = Namespace() + args.element = elements + args.element_only = elements_only + args.formulamode = formulamode + args.past_days = past_days + args.group_name = None + if groups is not None: + args.group_pk = [group.id for group in groups] + else: + args.group_pk = None + args.all_users = all_users + + query = QueryFactory()() + entry_list = query.get_bands_and_parents_structure(args) + + counter = 0 + bands_list_data = list() + if not raw: + bands_list_data.append(PROJECT_HEADERS) + for entry in entry_list: + for i, value in enumerate(entry): + if isinstance(value, list): + entry[i] = ",".join(value) + for i in range(len(entry), len(PROJECT_HEADERS)): + entry.append(None) + counter += 1 + bands_list_data.extend(entry_list) + if raw: + echo.echo(tabulate(bands_list_data, tablefmt='plain')) + else: + echo.echo(tabulate(bands_list_data, headers="firstrow")) + echo.echo("\nTotal results: {}\n".format(counter)) + + +@bands.command('export') +@click.option( + '-y', + '--format', + 'used_format', + type=click.Choice([ + 'agr', 'agr_batch', 'dat_blocks', 'dat_multicolumn', 'gnuplot', 'json', 'mpl_pdf', 'mpl_png', 'mpl_singlefile', + 'mpl_withjson' + ]), + default='json', + help="Type of the exported file.") +@click.option( + '--y-min-lim', + type=click.FLOAT, + default=None, + help='The minimum value for the y axis.' + ' Default: minimum of all bands') +@click.option( + '--y-max-lim', + type=click.FLOAT, + default=None, + help='The maximum value for the y axis.' + ' Default: maximum of all bands') +@click.option( + '-o', + '--output', + type=click.STRING, + default=None, + help="If present, store the output directly on a file " + "with the given name.
It is essential to use this option " + "if more than one file needs to be created.") +@options.FORCE(help="If passed, overwrite files without checking.") +@click.option( + '--prettify-format', + default=None, + type=click.Choice(Prettifier.get_prettifiers()), + help='The style of labels for the prettifier') +@arguments.NODE() +def export(used_format, y_min_lim, y_max_lim, output, force, prettify_format, node): + """ + Export BandsData objects + """ + args = {} + if y_min_lim is not None: + args['y_min_lim'] = y_min_lim + if y_max_lim is not None: + args['y_max_lim'] = y_max_lim + if prettify_format is not None: + args['prettify_format'] = prettify_format + + if not isinstance(node, BandsData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), BandsData)) + _export(node, output, used_format, other_args=args, overwrite=force) diff --git a/aiida/cmdline/commands/data/cif.py b/aiida/cmdline/commands/data/cif.py new file mode 100644 index 0000000000..ebd19ed9a0 --- /dev/null +++ b/aiida/cmdline/commands/data/cif.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage CifData objects from command line. +""" +import click +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.commands.data.list import _list, list_options +from aiida.cmdline.commands.data.export import _export, export_options +from aiida.cmdline.commands.data.deposit import deposit_tcod, deposit_options +from aiida.cmdline.utils import echo +from aiida.cmdline.params import arguments + + +# pylint: disable=unused-argument +@verdi_data.group('cif') +@click.pass_context +def cif(ctx): + """ + Manipulate CifData objects + """ + pass + + +@cif.command('show') +@arguments.NODES() +@click.option( + '-f', + '--format', + 'given_format', + type=click.Choice(['jmol', 'vesta']), + default='jmol', + help="Type of the visualization format/tool.") +def show(nodes, given_format): + """ + Visualize CifData objects + """ + from aiida.orm.data.cif import CifData + from aiida.cmdline.commands.data.show import _show_jmol + from aiida.cmdline.commands.data.show import _show_vesta + + for node in nodes: + if not isinstance(node, CifData): + echo.echo_critical("Node {} is of class {} instead " "of {}".format(node, type(node), CifData)) + if given_format == "jmol": + _show_jmol(given_format, nodes) + elif given_format == "vesta": + _show_vesta(given_format, nodes) + else: + raise NotImplementedError("The format {} is not yet implemented".format(given_format)) + + +PROJECT_HEADERS = ["Id", "Formulae", "Source.URI"] + + +@cif.command('list') +@click.option( + '-f', + '--formulamode', + type=click.Choice(['hill', 'hill_compact', 'reduce', 'group', 'count', 'count_compact']), + default='hill', + help="Formula printing mode (if None, does not print the formula)") +@list_options +def cif_list(raw, formulamode, past_days, groups, all_users): + """ + List stored CifData objects + """ + from aiida.orm.data.cif import CifData + from tabulate import tabulate + elements = None + elements_only = False + 
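+ # A sketch of what _list returns here (values are hypothetical): + # [[42, ['C4 H8 O2'], 'file:///tmp/a.cif'], ...] + # i.e. one row per CifData node, projected on PROJECT_HEADERS.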
entry_list = _list(CifData, PROJECT_HEADERS, elements, elements_only, formulamode, past_days, groups, all_users) + + counter = 0 + cif_list_data = list() + + if not raw: + cif_list_data.append(PROJECT_HEADERS) + for entry in entry_list: + for i, value in enumerate(entry): + if isinstance(value, list): + new_entry = list() + for elm in value: + if elm is None: + new_entry.append('') + else: + new_entry.append(elm) + entry[i] = ",".join(new_entry) + for i in range(len(entry), len(PROJECT_HEADERS)): + entry.append(None) + counter += 1 + cif_list_data.extend(entry_list) + if raw: + echo.echo(tabulate(cif_list_data, tablefmt='plain')) + else: + echo.echo(tabulate(cif_list_data, headers="firstrow")) + echo.echo("\nTotal results: {}\n".format(counter)) + + +SUPPORTED_FORMATS = ['cif', 'tcod'] + + +@cif.command('export') +@click.option('-y', '--format', type=click.Choice(SUPPORTED_FORMATS), default='cif', help="Type of the exported file.") +@export_options +def export(**kwargs): + """ + Export CifData object + """ + from aiida.orm.data.cif import CifData + + node = kwargs.pop('node') + output = kwargs.pop('output') + fmt = kwargs.pop('format') + force = kwargs.pop('force') + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, CifData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), CifData)) + _export(node, output, fmt, other_args=kwargs, overwrite=force) + + +@cif.command('import') +@click.argument('filename', type=click.Path(exists=True, dir_okay=False, resolve_path=True)) +def importfile(filename): + """ + Import a CIF file into a CifData object + """ + import os + from aiida.orm.data.cif import CifData + + try: + node, _ = CifData.get_or_create(os.path.abspath(filename)) + echo.echo_success("imported {}".format(str(node))) + except ValueError as err: + echo.echo_critical(err) + + +@cif.command('deposit') +@deposit_options +def deposit(**kwargs): + """ + Deposit CifData object + """ + from aiida.orm.data.cif import CifData + node = kwargs.pop('node') + deposition_type = kwargs.pop('deposition_type') + parameter_data = kwargs.pop('parameter_data') + + #if kwargs['database'] is None: + #echo.echo_critical("Default database is not defined, please specify.") + kwargs.pop('database') # this looks like a bug, but the deposit function called inside deposit_tcod + # complains about the 'database' keyword argument + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, CifData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), CifData)) + echo.echo(deposit_tcod(node, deposition_type, parameter_data, **kwargs)) diff --git a/aiida/cmdline/commands/data/deposit.py b/aiida/cmdline/commands/data/deposit.py new file mode 100644 index 0000000000..6bef0a341c --- /dev/null +++ b/aiida/cmdline/commands/data/deposit.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This module provides deposit functionality to all data types +""" +import click +from aiida.cmdline import delayed_load_node as load_node +from aiida.cmdline.params import arguments + +DEPOSIT_OPTIONS = [ + click.option( + '-d', + '--database', + 'database', + type=click.Choice(['tcod']), + default='tcod', + help="Label of the database for deposition."), + click.option( + '--deposition-type', + type=click.Choice(['published', 'prepublication', 'personal']), + default='published', + help="Type of the deposition."), + click.option('-u', '--username', type=click.STRING, default=None, help="Depositor's username."), + click.option('-p', '--password', is_flag=True, default=False, help="Depositor's password."), + click.option('--user-email', type=click.STRING, default=None, help="Depositor's e-mail address."), + click.option('--title', type=click.STRING, default=None, help="Title of the publication."), + click.option('--author-name', type=click.STRING, default=None, help="Full name of the publication author."), + click.option('--author-email', type=click.STRING, default=None, help="E-mail address of the publication author."), + click.option('--url', type=click.STRING, default=None, help="URL of the deposition API."), + click.option( + '--code', + type=click.STRING, + default=None, + help="Label of the code to be used for the deposition." + " Default: cif_cod_deposit."), + click.option('--computer', type=click.STRING, default=None, help="Name of the computer to be used for deposition."), + click.option( + '--replace', type=click.INT, default=None, help="ID of the structure to be redeposited (replaced), if any."), + click.option( + '-m', + '--message', + type=click.STRING, + default=None, + help="Description of the change (relevant for redepositions only)."), + click.option( + '--reduce-symmetry/--no-reduce-symmetry', + 'reduce_symmetry', + is_flag=True, + default=None, + help='Do (default) or do not perform symmetry reduction.'), + click.option( + '--parameter-data', + type=click.INT, + default=None, + help="ID of the ParameterData to be exported alongside the" + " StructureData instance. By default, if StructureData" + " originates from a calculation with single" + " ParameterData in the output, aforementioned" + " ParameterData is picked automatically. 
Instead, the" + " option is used in the case the calculation produces" + " more than a single instance of ParameterData."), + click.option( + '--dump-aiida-database/--no-dump-aiida-database', + 'dump_aiida_database', + is_flag=True, + default=None, + help='Export (default) or do not export AiiDA database to the CIF file.'), + click.option( + '--exclude-external-contents/--no-exclude-external-contents', + 'exclude_external_contents', + is_flag=True, + default=None, + help='Do not (default) or do save the contents for external resources even if URIs are provided'), + click.option( + '--gzip/--no-gzip', 'gzip', is_flag=True, default=None, help='Do or do not (default) gzip large files.'), + click.option( + '--gzip-threshold', + type=click.INT, + default=None, + help="Specify the minimum size of exported file which should" + " be gzipped."), + arguments.NODE(), +] + + +def deposit_options(func): + for option in reversed(DEPOSIT_OPTIONS): + func = option(func) + + return func + + +def deposit_tcod(node, deposit_type, parameter_data=None, **kwargs): + """ + Deposition plugin for TCOD. + """ + from aiida.tools.dbexporters.tcod import deposit + parameters = None + if parameter_data is not None: + from aiida.orm import DataFactory + # pylint: disable=invalid-name + ParameterData = DataFactory('parameter') + parameters = load_node(parameter_data, sub_class=ParameterData) + + return deposit(node, deposit_type, parameters, **kwargs) diff --git a/aiida/cmdline/commands/data/export.py b/aiida/cmdline/commands/data/export.py new file mode 100644 index 0000000000..f71f1e2217 --- /dev/null +++ b/aiida/cmdline/commands/data/export.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This module provides export functionality to all data types +""" +import click +from aiida.cmdline.utils import echo +from aiida.cmdline.params import arguments +from aiida.cmdline.params import options + +EXPORT_OPTIONS = [ + click.option( + '--reduce-symmetry/--no-reduce-symmetry', + 'reduce_symmetry', + is_flag=True, + default=None, + help='Do (default) or do not perform symmetry reduction.'), + click.option( + '--parameter-data', + type=click.INT, + default=None, + help="ID of the ParameterData to be exported alongside the" + " StructureData instance. By default, if StructureData" + " originates from a calculation with single" + " ParameterData in the output, aforementioned" + " ParameterData is picked automatically. 
Instead, the" + " option is used in the case the calculation produces" + " more than a single instance of ParameterData."), + click.option( + '--dump-aiida-database/--no-dump-aiida-database', + 'dump_aiida_database', + is_flag=True, + default=None, + help='Export (default) or do not export AiiDA database to the CIF file.'), + click.option( + '--exclude-external-contents/--no-exclude-external-contents', + 'exclude_external_contents', + is_flag=True, + default=None, + help='Do not (default) or do save the contents for external resources even if URIs are provided'), + click.option('--gzip/--no-gzip', is_flag=True, default=None, help='Do or do not (default) gzip large files.'), + click.option( + '--gzip-threshold', + type=click.INT, + default=None, + help="Specify the minimum size of exported file which should" + " be gzipped."), + click.option( + '-o', + '--output', + type=click.STRING, + default=None, + help="If present, store the output directly on a file " + "with the given name. It is essential to use this option " + "if more than one file needs to be created."), + options.FORCE(help="If passed, overwrite files without checking."), + arguments.NODE(), +] + + +def export_options(func): + for option in reversed(EXPORT_OPTIONS): + func = option(func) + + return func + + +def _export(node, output_fname, fileformat, other_args=None, overwrite=False): + """ + Depending on the parameters, either print the (single) output file on + screen, or store the file(s) on disk. + + :param node: the Data node to print or store on disk + :param output_fname: The filename to store the main file. If empty or + None, print instead + :param fileformat: a string to pass to the _exportstring method + :param other_args: a dictionary with additional kwargs to pass to _exportstring + :param overwrite: if False, stops if any file already exists (when output_fname + is not empty + + :note: this function calls directly sys.exit(1) when an error occurs (or e.g. if + check_overwrite is True and a file already exists). + """ + if other_args is None: + other_args = {} + try: + # pylint: disable=protected-access + if output_fname: + try: + node.export(output_fname, fileformat=fileformat, overwrite=overwrite, **other_args) + except OSError as err: + echo.echo_critical("verdi: ERROR while exporting file:\n" + err.message) + else: + filetext, extra_files = node._exportstring(fileformat, main_file_name=output_fname, **other_args) + if extra_files: + echo.echo_critical("This format requires to write more than one file.\n" + "You need to pass the -o option to specify a file name.") + else: + print filetext + except TypeError as err: + # This typically occurs for parameters that are passed down to the + # methods in, e.g., BandsData, but they are not accepted + echo.echo_critical("verdi: ERROR, probably a parameter is not " + "supported by the specific format.\nError " + "message: {}".format(err.message)) diff --git a/aiida/cmdline/commands/data/list.py b/aiida/cmdline/commands/data/list.py new file mode 100644 index 0000000000..57641f66ac --- /dev/null +++ b/aiida/cmdline/commands/data/list.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This module provides list functionality for all data types. +""" +import click +from aiida.orm.implementation import Group +from aiida.orm.user import User +from aiida.orm.backend import construct_backend +from aiida.cmdline.params import options + +LIST_OPTIONS = [ + click.option( + '-p', + '--past-days', + type=click.INT, + default=None, + help="Add a filter to show only data nodes" + " created in the past N days"), + click.option( + '-A', + '--all-users', + is_flag=True, + default=False, + help="Show for all users, rather than only for the " + "current user"), + options.RAW(), +] + + +def list_options(func): + """ + Create a decorator that adds all the common list options + """ + for option in reversed(LIST_OPTIONS): + func = option(func) + + # Additional options + # For some reason, if the following options are added to the list + # above they do not behave as expected (i.e. they stop being + # MultipleValueOption) + func = options.GROUPS()(func) + + return func + + +def query(datatype, project, past_days, group_pks, all_users): + """ + Perform the query + """ + import datetime + + from aiida.orm.querybuilder import QueryBuilder + from aiida.utils import timezone + + backend = construct_backend() + + qbl = QueryBuilder() + if all_users is False: + user = backend.users.get_automatic_user() + qbl.append(User, tag="creator", filters={"email": user.email}) + else: + qbl.append(User, tag="creator") + + # If there is a time restriction + data_filters = {} + if past_days is not None: + now = timezone.now() + n_days_ago = now - datetime.timedelta(days=past_days) + data_filters.update({"ctime": {'>=': n_days_ago}}) + + qbl.append(datatype, tag="data", created_by="creator", filters=data_filters, project=project) + + # If there is a group restriction + if group_pks is not None: + group_filters = dict() + group_filters.update({"id": {"in": group_pks}}) + qbl.append(Group, tag="group", filters=group_filters, group_of="data") + + qbl.order_by({datatype: {'ctime': 'asc'}}) + + object_list = qbl.distinct() + return object_list.all() + + +# pylint: disable=unused-argument,too-many-arguments +def _list(datatype, columns, elements, elements_only, formulamode, past_days, groups, all_users): + """ + List stored objects + """ + columns_dict = { + 'ID': 'id', + 'Id': 'id', + 'Ctime': 'ctime', + 'Label': 'label', + 'Formula': 'attributes.formula', + 'Kinds': 'attributes.kinds', + 'Sites': 'attributes.sites', + 'Formulae': 'attributes.formulae', + 'Source': 'attributes.source', + 'Source.URI': 'attributes.source.uri', + } + project = [columns_dict[k] for k in columns] + group_pks = None + if groups is not None: + group_pks = [g.pk for g in groups] + return query(datatype, project, past_days, group_pks, all_users) diff --git a/aiida/cmdline/commands/data/parameter.py b/aiida/cmdline/commands/data/parameter.py new file mode 100644 index 0000000000..4f7da72723 --- /dev/null +++ b/aiida/cmdline/commands/data/parameter.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage ParameterData objects from command line. +""" +import click +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params import arguments +from aiida.cmdline.utils import echo + + +# pylint: disable=unused-argument
+@verdi_data.group('parameter') +@click.pass_context +def parameter(ctx): + """ + View and manipulate ParameterData objects. + """ + pass + + +@parameter.command('show') +@arguments.NODES() +def show(nodes): + """ + Show contents of ParameterData nodes. + """ + from aiida.orm.data.parameter import ParameterData + from aiida.cmdline import print_dictionary + for node in nodes: + if not isinstance(node, ParameterData): + echo.echo_error("Node {} is of class {} instead of {}".format(node, type(node), ParameterData)) + continue + the_dict = node.get_dict() + print_dictionary(the_dict, 'json+date') diff --git a/aiida/cmdline/commands/data/remote.py b/aiida/cmdline/commands/data/remote.py new file mode 100644 index 0000000000..93145602cb --- /dev/null +++ b/aiida/cmdline/commands/data/remote.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage RemoteData objects from command line. +""" +import click +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params import arguments +from aiida.cmdline.utils import echo +from aiida.common.utils import get_mode_string + + +# pylint: disable=unused-argument +@verdi_data.group('remote') +@click.pass_context +def remote(ctx): + """ + Manage RemoteData objects + """ + pass + + +@remote.command('ls') +@click.option('-l', '--long', 'ls_long', is_flag=True, default=False, help="Display also file metadata") +@click.option('-p', '--path', type=click.STRING, default='.', help="The folder to list") +@arguments.NODE() +def lsfunction(ls_long, path, node): + """ + List directory content on remote RemoteData objects. + """ + import datetime + try: + content = node.listdir_withattributes(path=path) + except (IOError, OSError) as err: + echo.echo_critical("Unable to access the remote folder" + " or file, check if it exists.\n" + "Original error: {}".format(str(err))) + for metadata in content: + if ls_long: + mtime = datetime.datetime.fromtimestamp(metadata['attributes'].st_mtime) + pre_line = '{} {:10} {} '.format( + get_mode_string(metadata['attributes'].st_mode), metadata['attributes'].st_size, + mtime.strftime("%d %b %Y %H:%M")) + click.echo(pre_line, nl=False) + if metadata['isdir']: + click.echo(click.style(metadata['name'], fg='blue')) + else: + click.echo(metadata['name']) + + +@remote.command('cat') +@arguments.NODE() +@click.argument('path', type=click.STRING) +def cat(node, path): + """ + Show the content of remote files in RemoteData objects.
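+ + Hypothetical example (PK and path are placeholders):: + + verdi data remote cat 1234 some/relative/file.txt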
+ """ + import os + import sys + import tempfile + try: + with tempfile.NamedTemporaryFile(delete=False) as tmpf: + tmpf.close() + node.getfile(path, tmpf.name) + with open(tmpf.name) as fobj: + sys.stdout.write(fobj.read()) + except IOError as err: + click.echo("ERROR {}: {}".format(err.errno, str(err)), err=True) + sys.exit(1) + + try: + os.remove(tmpf.name) + except OSError: + # If you cannot delete, ignore (maybe I didn't manage to create it in the first place + pass + + +@remote.command('show') +@arguments.NODE() +def show(node): + """ + Show information on a RemoteData object. + """ + click.echo("- Remote computer name:") + click.echo(" {}".format(node.get_computer_name())) + click.echo("- Remote folder full path:") + click.echo(" {}".format(node.get_remote_path())) diff --git a/aiida/cmdline/commands/data/show.py b/aiida/cmdline/commands/data/show.py new file mode 100644 index 0000000000..06f4e0e78f --- /dev/null +++ b/aiida/cmdline/commands/data/show.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage showfunctionality to all data types. +""" +import click +from aiida.cmdline.utils import echo +from aiida.cmdline.params import arguments +from aiida.cmdline.params.options.multivalue import MultipleValueOption +from aiida.common.exceptions import MultipleObjectsError + +SHOW_OPTIONS = [ + arguments.NODES(), + click.option( + '-f', + '--format', + 'show_format', + type=click.Choice(['jmol', 'xcrysden', 'mpl_heatmap', 'mpl_pos']), + default='jmol', + help="Type of the visualization format/tool"), + click.option( + '--step', + type=click.INT, + default=None, + help="ID of the trajectory step. 
If none is supplied, all" + " steps are shown."), + click.option('-c', '--contour', type=click.FLOAT, cls=MultipleValueOption, default=None, help="Isovalues to plot"), + click.option( + '--sampling-stepsize', + type=click.INT, + default=None, + help="Sample positions in plot every sampling_stepsize" + " timestep"), + click.option( + '--stepsize', + type=click.INT, + default=None, + help="The stepsize for the trajectory, set it higher" + " to reduce number of points"), + click.option('--mintime', type=click.INT, default=None, help="The time to plot from"), + click.option('--maxtime', type=click.INT, default=None, help="The time to plot to"), + click.option( + '-e', + '--elements', + type=click.STRING, + cls=MultipleValueOption, + default=None, + help="Show only atoms of that species"), + click.option('--indices', type=click.INT, cls=MultipleValueOption, default=None, help="Show only these indices"), + click.option( + '--dont-block', 'block', is_flag=True, flag_value=False, default=True, help="Don't block interpreter when showing plot."), +] + + +def show_options(func): + """Decorate a command with the common show options.""" + for option in reversed(SHOW_OPTIONS): + func = option(func) + + return func + + +def _show_jmol(exec_name, trajectory_list, **kwargs): + """ + Plugin for jmol + """ + import tempfile + import subprocess + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile() as tmpf: + for trajectory in trajectory_list: + tmpf.write(trajectory._exportstring('cif', **kwargs)[0]) + tmpf.flush() + + try: + subprocess.check_output([exec_name, tmpf.name]) + except subprocess.CalledProcessError: + # The program died: just print a message + echo.echo_info("the call to {} ended with an error.".format(exec_name)) + except OSError as err: + if err.errno == 2: + echo.echo_critical("No executable '{}' found. Add to the path, " + "or try with an absolute path.".format(exec_name)) + else: + raise + + +def _show_xcrysden(exec_name, object_list, **kwargs): + """ + Plugin for xcrysden + """ + import tempfile + import subprocess + + if len(object_list) > 1: + raise MultipleObjectsError("Visualization of multiple trajectories " "is not implemented") + obj = object_list[0] + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile(suffix='.xsf') as tmpf: + tmpf.write(obj._exportstring('xsf', **kwargs)[0]) + tmpf.flush() + + try: + subprocess.check_output([exec_name, '--xsf', tmpf.name]) + except subprocess.CalledProcessError: + # The program died: just print a message + echo.echo_info("the call to {} ended with an error.".format(exec_name)) + except OSError as err: + if err.errno == 2: + echo.echo_critical("No executable '{}' found.
Add to the path, " + "or try with an absolute path.".format(exec_name)) + else: + raise + + +# pylint: disable=unused-argument +def _show_mpl_pos(exec_name, trajectory_list, **kwargs): + """ + Produces a matplotlib plot of the trajectory + """ + for traj in trajectory_list: + traj.show_mpl_pos(**kwargs) + + +# pylint: disable=unused-argument +def _show_mpl_heatmap(exec_name, trajectory_list, **kwargs): + """ + Produces a matplotlib plot of the trajectory + """ + for traj in trajectory_list: + traj.show_mpl_heatmap(**kwargs) + + +# pylint: disable=unused-argument +def _show_ase(exec_name, structure_list): + """ + Plugin to show the structure with the ASE visualizer + """ + try: + from ase.visualize import view + for structure in structure_list: + view(structure.get_ase()) + except ImportError: + raise + + +def _show_vesta(exec_name, structure_list): + """ + Plugin for VESTA + This VESTA plugin was added by Yue-Wen FANG and Abel Carreras + at Kyoto University in the group of Prof. Isao Tanaka's lab + + """ + import tempfile + import subprocess + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile(suffix='.cif') as tmpf: + for structure in structure_list: + tmpf.write(structure._exportstring('cif')[0]) + tmpf.flush() + + try: + subprocess.check_output([exec_name, tmpf.name]) + except subprocess.CalledProcessError: + # The program died: just print a message + echo.echo_info("the call to {} ended with an error.".format(exec_name)) + except OSError as err: + if err.errno == 2: + echo.echo_critical("No executable '{}' found. Add to the path, " + "or try with an absolute path.".format(exec_name)) + else: + raise + + +def _show_vmd(exec_name, structure_list): + """ + Plugin for vmd + """ + import tempfile + import subprocess + + if len(structure_list) > 1: + raise MultipleObjectsError("Visualization of multiple objects " "is not implemented") + structure = structure_list[0] + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile(suffix='.xsf') as tmpf: + tmpf.write(structure._exportstring('xsf')[0]) + tmpf.flush() + + try: + subprocess.check_output([exec_name, tmpf.name]) + except subprocess.CalledProcessError: + # The program died: just print a message + echo.echo_info("the call to {} ended with an error.".format(exec_name)) + except OSError as err: + if err.errno == 2: + echo.echo_critical("No executable '{}' found. Add to the path, " + "or try with an absolute path.".format(exec_name)) + else: + raise diff --git a/aiida/cmdline/commands/data/structure.py b/aiida/cmdline/commands/data/structure.py new file mode 100644 index 0000000000..0f0e3aeed3 --- /dev/null +++ b/aiida/cmdline/commands/data/structure.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage StructureData objects from command line. 
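+ +Hypothetical usage sketches (PKs and file names are placeholders):: + + verdi data structure list -e Fe O + verdi data structure import -f xyz molecule.xyz + verdi data structure export -y xsf -o struct.xsf 1234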
+""" +import click +from aiida.cmdline.commands.data.list import _list, list_options +from aiida.cmdline.commands.data.export import _export, export_options +from aiida.cmdline.commands.data.deposit import deposit_tcod, deposit_options +from aiida.cmdline.params.options.multivalue import MultipleValueOption +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params import arguments +from aiida.backends.utils import load_dbenv, is_dbenv_loaded +from aiida.cmdline.utils import echo + + +# pylint: disable=unused-argument +@verdi_data.group('structure') +@click.pass_context +def structure(ctx): + """ + Manipulation of the structures + """ + pass + + +@structure.command('show') +@arguments.NODES() +@click.option( + '-f', + '--format', + 'given_format', + type=click.Choice(['ase', 'jmol', 'vesta', 'vmd', 'xcrysden']), + default='ase', + help="Type of the visualization format/tool") +def show(nodes, given_format): + """ + Visualize StructureData objects + """ + from aiida.cmdline.commands.data.show import _show_jmol + from aiida.cmdline.commands.data.show import _show_ase + from aiida.cmdline.commands.data.show import _show_vesta + from aiida.cmdline.commands.data.show import _show_vmd + from aiida.cmdline.commands.data.show import _show_xcrysden + from aiida.orm.data.structure import StructureData + for node in nodes: + if not isinstance(node, StructureData): + echo.echo_critical("Node {} is of class {} instead " "of {}".format(node, type(node), StructureData)) + if given_format == "ase": + _show_ase(given_format, nodes) + elif given_format == "jmol": + _show_jmol(given_format, nodes) + elif given_format == "vesta": + _show_vesta(given_format, nodes) + elif given_format == "vmd": + _show_vmd(given_format, nodes) + elif given_format == "xcrysden": + _show_xcrysden(given_format, nodes) + else: + raise NotImplementedError("The format {} is not yet implemented".format(given_format)) + + +PROJECT_HEADERS = ["Id", "Label", "Kinds", "Sites"] + + +# pylint: disable=too-many-locals,too-many-branches +@structure.command('list') +@list_options +@click.option( + '-f', + '--formulamode', + type=click.Choice(['hill', 'hill_compact', 'reduce', 'group', 'count', 'count_compact']), + default='hill', + help="Formula printing mode (if None, does not print the formula)") +@click.option( + '-e', + '--elements', + type=click.STRING, + cls=MultipleValueOption, + default=None, + help="Print only the objects that" + " contain desired elements") +def list_structures(elements, raw, formulamode, past_days, groups, all_users): + """ + List stored StructureData objects + """ + from aiida.orm.data.structure import StructureData + from aiida.orm.data.structure import (get_formula, get_symbols_string) + from tabulate import tabulate + + elements_only = False + lst = _list(StructureData, PROJECT_HEADERS, elements, elements_only, formulamode, past_days, groups, all_users) + + entry_list = [] + for [pid, label, akinds, asites] in lst: + # If symbols are defined there is a filtering of the structures + # based on the element + # When QueryBuilder will support this (attribute)s filtering, + # it will be pushed in the query. 
+ if elements is not None: + all_symbols = [_["symbols"][0] for _ in akinds] + if not any([s in elements for s in all_symbols]): + continue + + if elements_only: + echo.echo_critical("Not implemented elements-only search") + + # We want only the StructureData that have attributes + if akinds is None or asites is None: + continue + + symbol_dict = {} + for k in akinds: + symbols = k['symbols'] + weights = k['weights'] + symbol_dict[k['name']] = get_symbols_string(symbols, weights) + + try: + symbol_list = [] + for site in asites: + symbol_list.append(symbol_dict[site['kind_name']]) + formula = get_formula(symbol_list, mode=formulamode) + # If for some reason there is no kind with the name + # referenced by the site + except KeyError: + formula = "<>" + entry_list.append([str(pid), str(formula), label]) + + counter = 0 + struct_list_data = list() + if not raw: + struct_list_data.append(PROJECT_HEADERS) + for entry in entry_list: + for i, value in enumerate(entry): + if isinstance(value, list): + entry[i] = ",".join(value) + for i in range(len(entry), len(PROJECT_HEADERS)): + entry.append(None) + counter += 1 + struct_list_data.extend(entry_list) + if raw: + echo.echo(tabulate(struct_list_data, tablefmt='plain')) + else: + echo.echo(tabulate(struct_list_data, headers="firstrow")) + echo.echo("\nTotal results: {}\n".format(counter)) + + +SUPPORTED_FORMATS = ['cif', 'tcod', 'xsf', 'xyz'] + + +# XYZ for alloys or systems with vacancies not implemented. +# supported_formats = ['cif', 'tcod', 'xsf'] +@structure.command('export') +@click.option( + '-y', + '--format', + type=click.Choice(SUPPORTED_FORMATS), + default='xyz', + # default='cif', + help="Type of the exported file.") +@export_options +def export(**kwargs): + """ + Export StructureData object + """ + from aiida.orm.data.structure import StructureData + + node = kwargs.pop('node') + output = kwargs.pop('output') + export_format = kwargs.pop('format') + force = kwargs.pop('force') + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, StructureData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), StructureData)) + _export(node, output, export_format, other_args=kwargs, overwrite=force) + + +@structure.command('deposit') +@deposit_options +def deposit(**kwargs): + """ + Deposit StructureData object + """ + from aiida.orm.data.structure import StructureData + if not is_dbenv_loaded(): + load_dbenv() + node = kwargs.pop('node') + deposition_type = kwargs.pop('deposition_type') + parameter_data = kwargs.pop('parameter_data') + + #if kwargs['database'] is None: + #echo.echo_critical("Default database is not defined, please specify.") + kwargs.pop('database') # this looks like a bug, but the deposit function called inside + # deposit_tcod complains about the 'database' keyword argument + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, StructureData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), StructureData)) + echo.echo(deposit_tcod(node, deposition_type, parameter_data, **kwargs)) + + +def _import_xyz(filename, **kwargs): + """ + Imports an XYZ-file.
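+ + All kwargs are set by the structure_import command defined below: + vacuum_addition, vacuum_factor, pbc, store and view.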
+ """ + from os.path import abspath + from aiida.orm.data.structure import StructureData + + vacuum_addition = kwargs.pop('vacuum_addition') + vacuum_factor = kwargs.pop('vacuum_factor') + pbc = [bool(i) for i in kwargs.pop('pbc')] + store = kwargs.pop('store') + view_in_ase = kwargs.pop('view') + + echo.echo('importing XYZ-structure from: \n {}'.format(abspath(filename))) + filepath = abspath(filename) + with open(filepath) as fobj: + xyz_txt = fobj.read() + new_structure = StructureData() + # pylint: disable=protected-access + try: + new_structure._parse_xyz(xyz_txt) + new_structure._adjust_default_cell(vacuum_addition=vacuum_addition, vacuum_factor=vacuum_factor, pbc=pbc) + + if store: + new_structure.store() + if view_in_ase: + from ase.visualize import view + view(new_structure.get_ase()) + echo.echo(' Succesfully imported structure {}, ' + '(PK = {})'.format(new_structure.get_formula(), new_structure.pk)) + + except ValueError as err: + echo.echo_critical(err) + + +def _import_pwi(filename, **kwargs): + """ + Imports a structure from a quantumespresso input file. + """ + from os.path import abspath + try: + from qe_tools.parsers.pwinputparser import PwInputFile + except ImportError: + echo.echo_critical("You have not installed the package qe-tools. \n" + "You can install it with: pip install qe-tools") + + store = kwargs.pop('store') + view_in_ase = kwargs.pop('view') + + echo.echo('importing structure from: \n {}'.format(abspath(filename))) + filepath = abspath(filename) + + try: + inputparser = PwInputFile(filepath) + new_structure = inputparser.get_structuredata() + + if store: + new_structure.store() + if view_in_ase: + from ase.visualize import view + view(new_structure.get_ase()) + echo.echo(' Succesfully imported structure {}, ' + '(PK = {})'.format(new_structure.get_formula(), new_structure.pk)) + + except ValueError as err: + echo.echo_critical(err) + + +def _import_ase(filename, **kwargs): + """ + Imports a structure in a number of formats using the ASE routines. + """ + from os.path import abspath + from aiida.orm.data.structure import StructureData + + try: + import ase.io + except ImportError: + echo.echo_critical("You have not installed the package ase. 
\n" "You can install it with: pip install ase") + + store = kwargs.pop('store') + view_in_ase = kwargs.pop('view') + + echo.echo('importing structure from: \n {}'.format(abspath(filename))) + filepath = abspath(filename) + + try: + asecell = ase.io.read(filepath) + new_structure = StructureData(ase=asecell) + + if store: + new_structure.store() + if view_in_ase: + from ase.visualize import view + view(new_structure.get_ase()) + echo.echo(' Succesfully imported structure {}, ' + '(PK = {})'.format(new_structure.get_formula(), new_structure.pk)) + + except ValueError as err: + echo.echo_critical(err) + + +# pylint: disable=too-many-arguments +@structure.command('import') +@click.argument('filename', type=click.Path(exists=True, dir_okay=False, resolve_path=True)) +@click.option( + '-f', + '--format', + 'given_format', + type=click.Choice(['ase', 'pwi', 'xyz']), + default='xyz', + help="Type of the imported file.") +@click.option( + '--vacuum-factor', + type=click.FLOAT, + default=1.0, + help="The factor by which the cell accomodating the" + " structure should be increased, default: 1.0") +@click.option( + '--vacuum-addition', + type=click.FLOAT, + default=10.0, + help="The distance to add to the unit cell after" + " vacuum factor was applied to expand in each" + " dimension, default: 10.0") +@click.option( + '--pbc', + type=click.INT, + nargs=3, + default=[0, 0, 0], + help="Set periodic boundary conditions for each" + " lattice direction, 0 for no periodicity, any" + " other integer for periodicity") +@click.option('--view', is_flag=True, default=False, help='View resulting structure using ASE.') +@click.option('--dont-store', 'store', is_flag=True, default=True, help='Do not store the structure in AiiDA database.') +def structure_import(filename, given_format, vacuum_factor, vacuum_addition, pbc, view, store): + """ + Import structure + """ + args = {} + if vacuum_factor is not None: + args['vacuum_factor'] = vacuum_factor + if vacuum_addition is not None: + args['vacuum_addition'] = vacuum_addition + if pbc is not None: + args['pbc'] = pbc + if view is not None: + args['view'] = view + if store is not None: + args['store'] = store + + if given_format == "ase": + _import_ase(filename, **args) + elif given_format == "pwi": + _import_pwi(filename, **args) + elif given_format == "xyz": + _import_xyz(filename, **args) + else: + raise NotImplementedError("The format {} is not yet implemented".format(given_format)) diff --git a/aiida/cmdline/commands/data/trajectory.py b/aiida/cmdline/commands/data/trajectory.py new file mode 100644 index 0000000000..bb55b2e967 --- /dev/null +++ b/aiida/cmdline/commands/data/trajectory.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage TrajectoryData objects from command line. 
+""" +import click +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.commands.data.export import _export, export_options +from aiida.cmdline.utils import echo +from aiida.cmdline.commands.data.list import _list, list_options +from aiida.backends.utils import load_dbenv, is_dbenv_loaded +from aiida.cmdline.commands.data.show import show_options +from aiida.cmdline.commands.data.deposit import deposit_options, deposit_tcod + + +# pylint: disable=unused-argument +@verdi_data.group('trajectory') +@click.pass_context +def trajectory(ctx): + """ + View and manipulate TrajectoryData instances. + """ + pass + + +PROJECT_HEADERS = ["Id", "Label"] + + +@trajectory.command('list') +@list_options +def list_trajections(raw, past_days, groups, all_users): + """ + List trajectories stored in database. + """ + from aiida.orm.data.array.trajectory import TrajectoryData + from tabulate import tabulate + elements = None + elements_only = False + formulamode = None + entry_list = _list(TrajectoryData, PROJECT_HEADERS, elements, elements_only, formulamode, past_days, groups, + all_users) + + counter = 0 + struct_list_data = list() + if not raw: + struct_list_data.append(PROJECT_HEADERS) + for entry in entry_list: + for i, value in enumerate(entry): + if isinstance(value, list): + entry[i] = ",".join(value) + for i in range(len(entry), len(PROJECT_HEADERS)): + entry.append(None) + counter += 1 + struct_list_data.extend(entry_list) + if raw: + echo.echo(tabulate(struct_list_data, tablefmt='plain')) + else: + echo.echo(tabulate(struct_list_data, headers="firstrow")) + echo.echo("\nTotal results: {}\n".format(counter)) + + +@trajectory.command('show') +@show_options +def show(**kwargs): + """ + Visualize trajectory + """ + from aiida.orm.data.array.trajectory import TrajectoryData + from aiida.cmdline.commands.data.show import _show_jmol + from aiida.cmdline.commands.data.show import _show_xcrysden + from aiida.cmdline.commands.data.show import _show_mpl_pos + from aiida.cmdline.commands.data.show import _show_mpl_heatmap + nodes = kwargs.pop('nodes') + given_format = kwargs.pop('show_format') + for node in nodes: + if not isinstance(node, TrajectoryData): + echo.echo_critical("Node {} is of class {} instead " "of {}".format(node, type(node), TrajectoryData)) + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if given_format == "jmol": + _show_jmol(given_format, nodes, **kwargs) + elif given_format == "xcrysden": + _show_xcrysden(given_format, nodes, **kwargs) + elif given_format == "mpl_pos": + _show_mpl_pos(given_format, nodes, **kwargs) + elif given_format == "mpl_heatmap": + _show_mpl_heatmap(given_format, nodes, **kwargs) + else: + raise NotImplementedError("The format {} is not yet implemented".format(given_format)) + + +SUPPORTED_FORMATS = ['cif', 'tcod', 'xsf'] + + +@trajectory.command('export') +@click.option('-y', '--format', type=click.Choice(SUPPORTED_FORMATS), default='cif', help="Type of the exported file.") +@click.option( + '--step', + 'trajectory_index', + type=click.INT, + default=None, + help="ID of the trajectory step. 
If none is supplied, all" + " steps are explored.") +@export_options +def export(**kwargs): + """ + Export trajectory + """ + from aiida.orm.data.array.trajectory import TrajectoryData + + node = kwargs.pop('node') + output = kwargs.pop('output') + export_format = kwargs.pop('format') + force = kwargs.pop('force') + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, TrajectoryData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), TrajectoryData)) + _export(node, output, export_format, other_args=kwargs, overwrite=force) + + +@trajectory.command('deposit') +@click.option( + '--step', + 'trajectory_index', + type=click.INT, + default=1, + help="ID of the trajectory step. If none is " + "supplied, all steps are exported.") +@deposit_options +def deposit(**kwargs): + """ + Deposit trajectory object + """ + from aiida.orm.data.array.trajectory import TrajectoryData + if not is_dbenv_loaded(): + load_dbenv() + node = kwargs.pop('node') + deposition_type = kwargs.pop('deposition_type') + parameter_data = kwargs.pop('parameter_data') + + if kwargs['database'] is None: + echo.echo_critical("Default database is not defined, please specify.") + + for key, value in kwargs.items(): + if value is None: + kwargs.pop(key) + + if not isinstance(node, TrajectoryData): + echo.echo_critical("Node {} is of class {} instead of {}".format(node, type(node), TrajectoryData)) + deposit_tcod(node, deposition_type, parameter_data, **kwargs) diff --git a/aiida/cmdline/commands/data/upf.py b/aiida/cmdline/commands/data/upf.py new file mode 100644 index 0000000000..eb69260e4d --- /dev/null +++ b/aiida/cmdline/commands/data/upf.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +This allows to manage TrajectoryData objects from command line. +""" +import click +from aiida.cmdline.utils import echo +from aiida.cmdline.commands import verdi_data +from aiida.cmdline.params.options.multivalue import MultipleValueOption + + +# pylint: disable=unused-argument +@verdi_data.group('upf') +@click.pass_context +def upf(ctx): + """ + Manipulation of the upf families + """ + pass + + +@upf.command('uploadfamily') +@click.argument('folder', type=click.Path(exists=True, file_okay=False, resolve_path=True)) +@click.argument('group_name', type=click.STRING) +@click.argument('group_description', type=click.STRING) +@click.option( + '--stop-if-existing', + is_flag=True, + default=False, + help='Interrupt pseudos import if a pseudo was already present in the AiiDA database') +def uploadfamily(folder, group_name, group_description, stop_if_existing): + """ + Upload a new pseudopotential family. + + Returns the numbers of files found and the number of nodes uploaded. + + Call without parameters to get some help. + """ + import aiida.orm.data.upf as upf_ + files_found, files_uploaded = upf_.upload_upf_family(folder, group_name, group_description, stop_if_existing) + echo.echo_success("UPF files found: {}. 
New files uploaded: {}".format(files_found, files_uploaded)) + + +@upf.command('listfamilies') +@click.option( + '-d', + '--with-description', + 'with_description', + is_flag=True, + default=False, + help="Show also the description for the UPF family") +@click.option( + '-e', + '--elements', + 'elements', + type=click.STRING, + cls=MultipleValueOption, + default=None, + help="Filter the families only to those containing " + "a pseudo for each of the specified elements") +def listfamilies(elements, with_description): + """ + Print on screen the list of upf families installed + """ + from aiida.orm import DataFactory + from aiida.orm.data.upf import UPFGROUP_TYPE + + # pylint: disable=invalid-name + UpfData = DataFactory('upf') + from aiida.orm.querybuilder import QueryBuilder + from aiida.orm.group import Group + qb = QueryBuilder() + qb.append(UpfData, tag='upfdata') + if elements is not None: + qb.add_filter(UpfData, {'attributes.element': {'in': elements}}) + qb.append( + Group, + group_of='upfdata', + tag='group', + project=["name", "description"], + filters={"type": { + '==': UPFGROUP_TYPE + }}) + + qb.distinct() + if qb.count() > 0: + for res in qb.dict(): + group_name = res.get("group").get("name") + group_desc = res.get("group").get("description") + qb = QueryBuilder() + qb.append(Group, tag='thisgroup', filters={"name": {'like': group_name}}) + qb.append(UpfData, project=["id"], member_of='thisgroup') + + if with_description: + description_string = ": {}".format(group_desc) + else: + description_string = "" + + echo.echo_success("* {} [{} pseudos]{}".format(group_name, qb.count(), description_string)) + + else: + echo.echo_warning("No valid UPF pseudopotential family found.") + + +@upf.command('exportfamily') +@click.argument('folder', type=click.Path(exists=True, file_okay=False, resolve_path=True)) +@click.argument('group_name', type=click.STRING) +def exportfamily(folder, group_name): + """ + Export a pseudopotential family into a folder. + Call without parameters to get some help. 
+ """ + import os + from aiida.common.exceptions import NotExistent + from aiida.orm import DataFactory + + # pylint: disable=invalid-name + UpfData = DataFactory('upf') + try: + group = UpfData.get_upf_group(group_name) + except NotExistent: + echo.echo_critical("upf family {} not found".format(group_name)) + + # pylint: disable=protected-access + for node in group.nodes: + dest_path = os.path.join(folder, node.filename) + if not os.path.isfile(dest_path): + with open(dest_path, 'w') as dest: + with node._get_folder_pathsubfolder.open(node.filename) as source: + dest.write(source.read()) + else: + echo.echo_warning("File {} is already present in the " "destination folder".format(node.filename)) + + +@upf.command('import') +@click.argument('filename', type=click.Path(exists=True, dir_okay=False, resolve_path=True)) +@click.option( + '-f', + '--format', + 'given_format', + type=click.Choice(['upf']), + default='upf', + help="Format of the pseudopotential file") +def import_upf(filename, given_format): + """ + Import upf data object + """ + from aiida.orm.data.upf import UpfData + node, _ = UpfData.get_or_create(filename) + echo.echo_success("Imported: {}".format(node)) diff --git a/aiida/cmdline/commands/devel.py b/aiida/cmdline/commands/devel.py index 325d5f92f7..d049938ad5 100644 --- a/aiida/cmdline/commands/devel.py +++ b/aiida/cmdline/commands/devel.py @@ -101,7 +101,8 @@ def __init__(self, *args, **kwargs): from aiida.backends.tests import get_db_test_names from aiida.backends import settings - load_dbenv() + if not is_dbenv_loaded: + load_dbenv() db_test_list = get_db_test_names() super(Devel, self).__init__(*args, **kwargs) diff --git a/aiida/cmdline/params/options/overridable.py b/aiida/cmdline/params/options/overridable.py index 9da1c45efd..e325876f72 100644 --- a/aiida/cmdline/params/options/overridable.py +++ b/aiida/cmdline/params/options/overridable.py @@ -16,7 +16,7 @@ class OverridableOption(object): help message for an option on a per-command basis. Sometimes the option should be prompted for if it is not given On some commands an option might take any folder path, while on another the path only has to exist. - Overridable options store the arguments to click.option and only instanciate the click.Option on call, + Overridable options store the arguments to click.option and only instantiate the click.Option on call, kwargs given to ``__call__`` override the stored ones. Example:: diff --git a/docs/source/developer_guide/data_cmdline.rst b/docs/source/developer_guide/data_cmdline.rst index 78c53eb16c..9bf049df45 100644 --- a/docs/source/developer_guide/data_cmdline.rst +++ b/docs/source/developer_guide/data_cmdline.rst @@ -24,17 +24,15 @@ commands is placed in ``_`` class inside are implemented in corresponding classes: -* :py:class:`~aiida.cmdline.commands.data.Listable` -* :py:class:`~aiida.cmdline.commands.data.Visualizable` -* :py:class:`~aiida.cmdline.commands.data.Importable` -* :py:class:`~aiida.cmdline.commands.data.Exportable`, +* :py:class:`~aiida.cmdline.commands.data.list` +* :py:class:`~aiida.cmdline.commands.data.export`, which are inherited by ``_`` classes (multiple inheritance is possible). Actions ``show``, ``import`` and ``export`` can be extended with new format plugins simply by adding additional methods in ``_`` (these are automatically detected). Action ``list`` can be extended by overriding default methods of the -:py:class:`~aiida.cmdline.commands.data.Listable`. +:py:class:`~aiida.cmdline.commands.data.list`. 
Adding plugins for ``show``, ``import``, ``export`` and like ------------------------------------------------------------ @@ -107,22 +105,16 @@ Implementing ``list`` As listing of data nodes can be extended with filters, controllable using command line parameters, the code of -:py:class:`~aiida.cmdline.commands.data.Listable` is split into a few +:py:class:`~aiida.cmdline.commands.data.list` is split into a few separate methods, that can be individually overridden: -* :py:class:`~aiida.cmdline.commands.data.Listable.list`: +* :py:class:`~aiida.cmdline.commands.data.list._list`: the main method, parsing the command line arguments and printing the data node information to the standard output; -* :py:class:`~aiida.cmdline.commands.data.Listable.query`: +* :py:class:`~aiida.cmdline.commands.data.list.query`: takes the parsed command line arguments and performs a query on the database, returns table of unformatted strings, representing the hits; -* :py:class:`~aiida.cmdline.commands.data.Listable.append_list_cmdline_arguments`: - informs the command line argument parser about additional, user-defined - parameters, used to control the - :py:class:`~aiida.cmdline.commands.data.Listable.query` function; -* :py:class:`~aiida.cmdline.commands.data.Listable.get_column_names`: - returns the names of columns to be printed by - :py:class:`~aiida.cmdline.commands.data.Listable.list` method. + :py:class:`~aiida.cmdline.commands.data.list.query` function; Adding a ``verdi`` command