diff --git a/install_usermodule_r8.sh b/install_usermodule_r8.sh
index 053daa25..ce1ecb21 100755
--- a/install_usermodule_r8.sh
+++ b/install_usermodule_r8.sh
@@ -116,9 +116,12 @@ EOF
install_snap() {
MODULE_VERSION="$1"
BASE_MODULE_VERSION="$2"
+
+ source /modules/rhel8/conda/install/etc/profile.d/conda.sh
+
BASE_MODULE_PREFIX=/modules/rhel8/user-apps/fou-modules/SnapPy/"$BASE_MODULE_VERSION"/
- source /modules/rhel8/conda/install/etc/profile.d/conda.sh "$BASE_MODULE_PREFIX"
+ conda activate "$BASE_MODULE_PREFIX"
MODULE_PREFIX=/modules/rhel8/user-apps/fou-modules/SnapPy/"$MODULE_VERSION"/
mkdir --parent -- "$MODULE_PREFIX/bin"
diff --git a/utils/SnapPy/METNO/Alvin.py b/utils/SnapPy/METNO/Alvin.py
index 108a1447..d113a187 100644
--- a/utils/SnapPy/METNO/Alvin.py
+++ b/utils/SnapPy/METNO/Alvin.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Apr 28, 2017
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,24 +31,22 @@
class Alvin(HPC):
- """
+ '''
Implementation of a HPC machine for alvin.nsc.liu.se
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
- connection = SSHConnection(
- username="metno_op", machine="alvin.nsc.liu.se", port=22
- )
+ '''
+ connection = SSHConnection(username="metno_op", machine="alvin.nsc.liu.se", port=22)
queue = SLURMQueue()
super().__init__(connection, queue)
class TestFrost(unittest.TestCase):
- """tests for alvin, only working when having an existing forecast account on alvin"""
-
+ '''tests for alvin, only working when having an existing forecast account on alvin'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("alvin")
@@ -58,7 +56,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -66,6 +64,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -75,8 +74,7 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
#SBATCH -A met
#SBATCH --nodes=1 --ntasks-per-node=1 --time=01:00:00
@@ -86,10 +84,7 @@ def test_full(self):
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file, rdir=self.rdir
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -114,6 +109,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/DirectConnection.py b/utils/SnapPy/METNO/DirectConnection.py
index 6061bd8f..a5694684 100644
--- a/utils/SnapPy/METNO/DirectConnection.py
+++ b/utils/SnapPy/METNO/DirectConnection.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Aug 08, 2017
@author: heikok
-"""
+'''
import os
import shutil
@@ -31,10 +31,11 @@
class DirectConnection(Connection):
- """no connection, working directly on that machine"""
+ '''no connection, working directly on that machine
+ '''
charset = typed_property("charset", str)
- """charset of stdout of the machine, usually utf-8"""
+ '''charset of stdout of the machine, usually utf-8'''
def __init__(self):
super().__init__()
@@ -55,28 +56,25 @@ def get_files(self, files, local_path=None, timeout=None):
def syscall(self, program, args, timeout=None):
if sys.version_info > (3, 5, 0):
- proc = subprocess.run(
- [program] + args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- timeout=timeout,
- )
- return (
- proc.stdout.decode(self.charset),
- proc.stderr.decode(self.charset),
- proc.returncode,
- )
+ proc = subprocess.run([program]+ args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout)
+ return (proc.stdout.decode(self.charset),
+ proc.stderr.decode(self.charset),
+ proc.returncode)
else:
try:
output = subprocess.check_output([program] + args, timeout=timeout)
- return (output.decode(self.charset), "", 0)
+ return (output.decode(self.charset),
+ '',
+ 0)
except subprocess.CalledProcessError as cpe:
- return (cpe.output.decode(self.charset), "", cpe.returncode)
+ return (cpe.output.decode(self.charset),
+ '',
+ cpe.returncode)
-class TestDirectConnection(unittest.TestCase):
- """Test for DirectConnection"""
+class TestDirectConnection(unittest.TestCase):
+ '''Test for DirectConnection'''
def setUp(self):
unittest.TestCase.setUp(self)
self.dir1 = os.path.join(os.path.dirname(__file__), "testdir1")
@@ -86,17 +84,19 @@ def setUp(self):
if not os.path.exists(self.dir2):
os.mkdir(self.dir2)
- self.files = ["file1", "file2"]
+ self.files = ['file1', 'file2']
for file in self.files:
infile = os.path.join(self.dir1, file)
if not os.path.exists(infile):
- with open(infile, "w") as ifh:
+ with open(infile, 'w') as ifh:
ifh.write("file: {name}".format(name=infile))
outfile = os.path.join(self.dir2, file)
if os.path.exists(outfile):
os.unlink(outfile)
self.conn = DirectConnection()
+
+
def tearDown(self):
unittest.TestCase.tearDown(self)
for path in (self.dir1, self.dir2):
@@ -120,7 +120,8 @@ def test_get_files(self):
files_o = [os.path.join(self.dir2, x) for x in self.files]
self.conn.get_files(files_i, self.dir2, 5)
for file in files_o:
- self.assertTrue(os.path.exists(file), "file {} exists".format(file))
+ self.assertTrue(os.path.exists(file),
+ "file {} exists".format(file))
os.unlink(file)
def test_put_files(self):
diff --git a/utils/SnapPy/METNO/Frost.py b/utils/SnapPy/METNO/Frost.py
index 2225d0a2..d2dcfbc1 100644
--- a/utils/SnapPy/METNO/Frost.py
+++ b/utils/SnapPy/METNO/Frost.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 7, 2016
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,24 +31,22 @@
class Frost(HPC):
- """
+ '''
Implementation of a HPC machine for frost.nsc.liu.se
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
- connection = SSHConnection(
- username="metno_op", machine="frost.nsc.liu.se", port=22
- )
+ '''
+ connection = SSHConnection(username="metno_op", machine="frost.nsc.liu.se", port=22)
queue = SLURMQueue()
super().__init__(connection, queue)
class TestFrost(unittest.TestCase):
- """tests for frost, only working when having an existing forecast account on frost"""
-
+ '''tests for frost, only working when having an existing forecast account on frost'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("frost")
@@ -59,7 +57,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -67,6 +65,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -75,16 +74,15 @@ def test_full(self):
status_file = os.path.join(self.rdir, "status")
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
-
+
for f in self.strangeFiles:
- with open(f, "w") as fh:
+ with open(f, 'w') as fh:
fh.write(f)
self.hpc.put_files([f], self.rdir)
self.hpc.syscall("ls", [f])
-
+
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
#SBATCH -A met
#SBATCH --nodes=1 --ntasks-per-node=1 --time=01:00:00
@@ -94,10 +92,7 @@ def test_full(self):
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file, rdir=self.rdir
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -122,6 +117,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/HPC.py b/utils/SnapPy/METNO/HPC.py
index bed19c4b..a22d62c5 100644
--- a/utils/SnapPy/METNO/HPC.py
+++ b/utils/SnapPy/METNO/HPC.py
@@ -1,127 +1,123 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 7, 2016
@author: heikok
-"""
+'''
from abc import ABCMeta, abstractmethod
from enum import Enum, unique
-from subprocess import SubprocessError
+from subprocess import CalledProcessError, TimeoutExpired, SubprocessError
import sys
-def typed_property(name, expected_type):
- """Helper function to create properties with types"""
- storage_name = "_" + name
+def typed_property(name,expected_type):
+ '''Helper function to create properties with types'''
+ storage_name='_'+name
@property
def prop(self):
return getattr(self, storage_name)
@prop.setter
- def prop(self, value):
+ def prop(self,value):
if (value is not None) and (not isinstance(value, expected_type)):
- raise TypeError(
- "{} must be a '{}', but is '{}'".format(
- name, expected_type.__name__, value.__class__.__name__
- )
- )
+ raise TypeError('{} must be a \'{}\', but is \'{}\''.format(name, expected_type.__name__, value.__class__.__name__))
setattr(self, storage_name, value)
return prop
class Connection(metaclass=ABCMeta):
- """Baseclass for connections to another machine."""
-
+ '''Baseclass for connections to another machine.'''
def __init__(self):
return
@abstractmethod
def put_files(self, files, remote_path, timeout=None):
- """Put files to a remote machine.
+ '''Put files to a remote machine.
Raises: TimeoutExpired, SubprocessError on failure
- """
+ '''
pass
@abstractmethod
def get_files(self, files, local_path=None, timeout=None):
- """Get files from a remote machine.
+ '''Get files from a remote machine.
Raises: TimeoutExpired, SubprocessError on failure
- """
+ '''
pass
@abstractmethod
def syscall(self, program, args, timeout=None):
- """Call a program remote, eventually setting a timeout.
+ '''Call a program remote, eventually setting a timeout.
Returns: (stdout, stderr, retval) tuple, with retval being the returncode of the command.
Raises: TimeoutExpired, SubprocessError
- """
+ '''
pass
-
class Queue(metaclass=ABCMeta):
def __init__(self):
return
@abstractmethod
def submit_command(self, jobscript, args):
- """return the submit command, e.g. qsub or sbatch
+ '''return the submit command, e.g. qsub or sbatch
Keyword arguments:
jobscript -- the jobscript to submit
args -- arguments to the jobscript
Returns: the complete command as tuple (program, args), e.g. (qsub, [jobscript arg1 arg2 arg3]
- """
+ '''
pass
@abstractmethod
def parse_submit(self, command_output, command_error, returncode):
- """parse the output from the job-submission and return a QJob object
+ '''parse the output from the job-submission and return a QJob object
Returns: QJob on success or None on failure
- """
+ '''
pass
@abstractmethod
def status_command(self, qJob):
- """return the status command for the QJob"""
+ '''return the status command for the QJob'''
pass
@abstractmethod
def delete_command(self, qJob):
- """return the deletion command for the QJob"""
+ '''return the deletion command for the QJob'''
pass
+
@abstractmethod
def parse_status(self, qJob, status_output, status_err, returncode):
- """return the QJobStatus the QJob, except for testing for the status-file"""
+ '''return the QJobStatus the QJob, except for testing for the status-file'''
pass
-class HPC:
- """
+
+class HPC():
+ '''
A HPC is a abstract base-class to send and retrieve data to a HPC machine
or to submit and parse jobs on the HPC machines
@@ -150,8 +146,7 @@ class HPC:
hpc.delete_job(qJob)
- """
-
+ '''
connection = typed_property("connection", Connection)
queue = typed_property("queue", Queue)
@@ -161,7 +156,7 @@ def __init__(self, connection, queue):
return
def put_files(self, files, hpcpath, timeout=None):
- """send files to the HPC-machine
+ '''send files to the HPC-machine
Keyword arguments:
files -- list of files
@@ -170,11 +165,11 @@ def put_files(self, files, hpcpath, timeout=None):
Raises: SubprocessError, e.g. TimeoutExpired if it took to long
Returns: True if all files have been submitted
- """
+ '''
return self.connection.put_files(files, hpcpath, timeout)
def get_files(self, files, localpath=None, timeout=None):
- """retrieve files from the HPC-machine
+ '''retrieve files from the HPC-machine
Keyword arguments:
files -- list of files to retrieve from hpc (relative path to home, or absolute path)
@@ -184,29 +179,29 @@ def get_files(self, files, localpath=None, timeout=None):
Returns: True if all files have been retrieved
Raises: SubprocessError, e.g. TimeoutExpired if it took to long
- """
+ '''
return self.connection.get_files(files, localpath, timeout)
def submit_job(self, jobfile, args):
- """submit a job to the HPC machines queue
+ '''submit a job to the HPC machines queue
Keyword arguments:
jobfile -- hpc-filename to send to the HPC-queue
args -- job-arguments
Returns: a QJob object, None on failure
- """
+ '''
(command, args) = self.queue.submit_command(jobfile, args)
(stdout, stderr, retval) = self.syscall(command, args, timeout=None)
return self.queue.parse_submit(stdout, stderr, retval)
def delete_job(self, qJob):
- """delete a submitted job"""
+ '''delete a submitted job'''
(command, args) = self.queue.delete_command(qJob)
self.syscall(command, args, timeout=None)
def get_status(self, qjob, timeout=10):
- """Get the status of the job.
+ '''Get the status of the job.
Returns: a QJobStatus
running -- the job is still in the queue and indicated running
@@ -214,7 +209,7 @@ def get_status(self, qjob, timeout=10):
finished -- the status_file is accepted or the job is no longer in the queue
failed -- the job is not in the queue and the status_file is not accepted
unknown -- not able to read the status currently (usually timeouts)
- """
+ '''
(command, args) = self.queue.status_command(qjob)
try:
(stdout, stderr, ret_val) = self.syscall(command, args, timeout=timeout)
@@ -225,20 +220,18 @@ def get_status(self, qjob, timeout=10):
if qstatus == QJobStatus.finished and qjob.status_file is not None:
try:
- (status_content, serr, retval) = self.syscall(
- program="cat",
- args=[qjob.status_file.status_filename],
- timeout=timeout,
- )
+ (status_content, serr, retval) = self.syscall(program="cat",
+ args=[qjob.status_file.status_filename],
+ timeout=timeout)
if retval == 0:
if qjob.status_file.finished:
- if status_content.find(qjob.status_file.finished) != -1:
+ if (status_content.find(qjob.status_file.finished) != -1):
qstatus = QJobStatus.finished
else:
qstatus = QJobStatus.failed
else:
qstatus = QJobStatus.finished
- elif retval == 2: # no such file or directory
+ elif retval == 2: # no such file or directory
qstatus = QJobStatus.failed
else:
qstatus = QJobStatus.unknown
@@ -247,74 +240,66 @@ def get_status(self, qjob, timeout=10):
qstatus = QJobStatus.unknown
return qstatus
+
+
def syscall(self, program, args, timeout=None):
- """Call a program with arguments on the machine.
+ '''Call a program with arguments on the machine.
Returns: the output of the program as tuple (stdout, stderr, ret_val), with ret_val being the return-code
Raises: SubprocessError, e.g. TimeoutException if it took to long
- """
+ '''
return self.connection.syscall(program, args, timeout)
@staticmethod
def by_name(name):
- """
+ '''
Initialize a HPC by names, e.g. vilje, frost
- """
- if name == "vilje":
+ '''
+ if (name == "vilje"):
from . import Vilje
-
return Vilje.Vilje()
- elif name == "frost":
+ elif (name == "frost"):
from . import Frost
-
return Frost.Frost()
- elif name == "stratus":
+ elif (name == "stratus"):
from . import Stratus
-
return Stratus.Stratus()
- elif name == "alvin":
+ elif (name == "alvin"):
from . import Alvin
-
return Alvin.Alvin()
- elif name == "ppi_r8b":
+ elif (name == "ppi_r8b"):
from . import PPI_R8B
-
return PPI_R8B.PPI_R8B()
- elif name == "ppi_centos7_direct":
+ elif (name == "ppi_centos7_direct"):
from . import PPI_CentOS7_Direct
-
return PPI_CentOS7_Direct.PPI_CentOS7_Direct()
- elif name == "ppi_r8b_direct":
+ elif (name == "ppi_r8b_direct"):
from . import PPI_R8B_Direct
-
return PPI_R8B_Direct.PPI_R8B_Direct()
else:
raise NotImplementedError("no HPC named '{}'".format(name))
return
-
-class StatusFile:
- """Object to encapsulate the filename and status-strings in a status-file"""
-
+class StatusFile():
+ '''Object to encapsulate the filename and status-strings in a status-file'''
status_filename = typed_property("status_filename", str)
finished = typed_property("finished", str)
- def __init__(self, status_filename, finished=""):
- """Constructor
+ def __init__(self, status_filename, finished = ""):
+ '''Constructor
Keyword arguments:
status_filename -- filename to a statusfile
finished -- optional text the status_file needs to contain if really finished
- """
+ '''
self.status_filename = status_filename
self.finished = finished
return
-
@unique
class QJobStatus(Enum):
- """The job status. A status <= 0 means to continue to wait."""
+ '''The job status. A status <= 0 means to continue to wait.'''
unknown = -2
queued = -1
@@ -324,12 +309,14 @@ class QJobStatus(Enum):
class QJob(metaclass=ABCMeta):
- """Abstract baseclass of jobs submitted to a queue. Besides checking the queue, the job may
- also be controlled by a status_file which needs to be set manually with the status_file property"""
+ '''Abstract baseclass of jobs submitted to a queue. Besides checking the queue, the job may
+ also be controlled by a status_file which needs to be set manually with the status_file property'''
status_file = typed_property("status_file", StatusFile)
def __init__(self):
- """Constructor"""
+ '''Constructor'''
self.status_file = None
return
+
+
diff --git a/utils/SnapPy/METNO/PBSQueue.py b/utils/SnapPy/METNO/PBSQueue.py
index 64476171..cf54c1e5 100644
--- a/utils/SnapPy/METNO/PBSQueue.py
+++ b/utils/SnapPy/METNO/PBSQueue.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 17, 2016
@author: heikok
-"""
+'''
import re
import sys
import unittest
@@ -36,47 +36,48 @@ def __init__(self, jobid):
class PBSQueue(Queue):
+
def __init__(self):
super().__init__()
def submit_command(self, jobscript, args):
- """return the submit command, e.g. qsub or sbatch
+ '''return the submit command, e.g. qsub or sbatch
Keyword arguments:
jobscript -- the jobscript to submit
args -- arguments to the jobscript
Returns: the complete command as tuple (program, args), e.g. (qsub, [jobscript arg1 arg2 arg3]
- """
+ '''
jobargs = [jobscript]
jobargs.extend(args)
return ("qsub", jobargs)
def parse_submit(self, command_output, command_error, returncode):
- """parse the output from the job-submission and return a QJob object"""
- if returncode == 0:
+ '''parse the output from the job-submission and return a QJob object'''
+ if (returncode == 0):
jobid = command_output.strip()
- assert jobid != ""
+ assert(jobid != '')
return PBSQJob(jobid)
else:
- print(
- "qsub failed with code {}: {}".format(returncode, command_error),
- file=sys.stderr,
- )
+ print("qsub failed with code {}: {}".format(returncode, command_error),
+ file=sys.stderr)
return None
+
def status_command(self, qJob):
- """return the status command for the QJob"""
- assert isinstance(qJob, PBSQJob)
+ '''return the status command for the QJob'''
+ assert(isinstance(qJob, PBSQJob))
return ("qstat", ["{}".format(qJob.jobid)])
def delete_command(self, qJob):
- """return the delete command for the QJob"""
- assert isinstance(qJob, PBSQJob)
+ '''return the delete command for the QJob'''
+ assert(isinstance(qJob, PBSQJob))
return ("qdel", ["{}".format(qJob.jobid)])
+
def _parse_int(self, string):
- m = re.search(r"(\d+)", string)
+ m = re.search(r'(\d+)', string)
if m:
return int(m.group(1))
return 0
@@ -97,10 +98,11 @@ def _pure_parse_status(self, qJob, status_output):
return QJobStatus.queued
return QJobStatus.unknown
+
def parse_status(self, qJob, status_output, status_err, returncode):
- """return the QJobStatus the QJob, except for testing for the status-file"""
+ '''return the QJobStatus the QJob, except for testing for the status-file'''
- assert isinstance(qJob, PBSQJob)
+ assert(isinstance(qJob, PBSQJob))
if returncode == 153:
# unknown jobid = no longer existing
return QJobStatus.finished
@@ -113,38 +115,32 @@ def parse_status(self, qJob, status_output, status_err, returncode):
return QJobStatus.unknown
+
class TestPBSQueue(unittest.TestCase):
+
def setUp(self):
super().setUp()
self.queue = PBSQueue()
self.jobid = "7483866.service2"
+
def test_parse_status(self):
- status_output = """
+ status_output = '''
Job id Name User Time Use S Queue
---------------- ---------------- ---------------- -------- - -----
7483866.service2 podV01.2 catharfl 00:00:07 R workq
-"""
+'''
qJob = PBSQJob(self.jobid)
- self.assertEqual(
- self.queue.parse_status(qJob, status_output, "", 35),
- QJobStatus.finished,
- "parsing returncode",
- )
- self.assertEqual(
- self.queue.parse_status(qJob, status_output, "", 0),
- QJobStatus.running,
- "parsing output",
- )
+ self.assertEqual(self.queue.parse_status(qJob, status_output, "", 35),
+ QJobStatus.finished, "parsing returncode")
+ self.assertEqual(self.queue.parse_status(qJob, status_output, "", 0),
+ QJobStatus.running, "parsing output")
def test_parse_submit(self):
- command_output = """7483866.service2
-"""
- self.assertEqual(
- self.queue.parse_submit(command_output, "", 0).jobid,
- self.jobid,
- "parsing qsub command",
- )
+ command_output = '''7483866.service2
+'''
+ self.assertEqual(self.queue.parse_submit(command_output, "", 0).jobid,
+ self.jobid, "parsing qsub command")
if __name__ == "__main__":
diff --git a/utils/SnapPy/METNO/PPI_CentOS7_Direct.py b/utils/SnapPy/METNO/PPI_CentOS7_Direct.py
index dc6f77bc..0f9c2481 100644
--- a/utils/SnapPy/METNO/PPI_CentOS7_Direct.py
+++ b/utils/SnapPy/METNO/PPI_CentOS7_Direct.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Apr 28, 2017
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,22 +31,22 @@
class PPI_CentOS7_Direct(HPC):
- """
+ '''
Implementation of a HPC machine for PPI-centOS7
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
+ '''
connection = DirectConnection()
queue = SGEQueue()
super().__init__(connection, queue)
class TestPPI_Direct(unittest.TestCase):
- """tests for alvin, only working when having an existing forecast account on alvin"""
-
+ '''tests for alvin, only working when having an existing forecast account on alvin'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("ppi_centos7_direct")
@@ -56,7 +56,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -64,6 +64,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -73,15 +74,11 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -106,6 +103,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/PPI_R8B.py b/utils/SnapPy/METNO/PPI_R8B.py
index 4feb81b5..31b03e29 100644
--- a/utils/SnapPy/METNO/PPI_R8B.py
+++ b/utils/SnapPy/METNO/PPI_R8B.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Aug 08, 2017
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,22 +31,22 @@
class PPI_R8B(HPC):
- """
+ '''
Implementation of a HPC machine for ppi-r8login-b1
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
+ '''
connection = SSHConnection(machine="ppi-r8login-b1.int.met.no", port=22)
queue = SGEQueue()
super().__init__(connection, queue)
class Test_PPI_R8B(unittest.TestCase):
- """tests for ppi, only working when having an existing account on ppi"""
-
+ '''tests for ppi, only working when having an existing account on ppi'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("ppi_r8b")
@@ -56,7 +56,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -64,6 +64,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -73,15 +74,11 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file,
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -106,6 +103,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/PPI_R8B_Direct.py b/utils/SnapPy/METNO/PPI_R8B_Direct.py
index 8b416188..857eae93 100644
--- a/utils/SnapPy/METNO/PPI_R8B_Direct.py
+++ b/utils/SnapPy/METNO/PPI_R8B_Direct.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Apr 28, 2017
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,22 +31,22 @@
class PPI_R8B_Direct(HPC):
- """
+ '''
Implementation of a HPC machine for PPI-r8
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
+ '''
connection = DirectConnection()
queue = SGEQueue()
super().__init__(connection, queue)
class TestPPI_R8B_Direct(unittest.TestCase):
- """tests for alvin, only working when having an existing forecast account on alvin"""
-
+ '''tests for alvin, only working when having an existing forecast account on alvin'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("ppi_r8b_direct")
@@ -56,7 +56,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -64,6 +64,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -73,15 +74,11 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file,
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -106,6 +103,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/SGEQueue.py b/utils/SnapPy/METNO/SGEQueue.py
index e07d1c0a..75c9fd3b 100644
--- a/utils/SnapPy/METNO/SGEQueue.py
+++ b/utils/SnapPy/METNO/SGEQueue.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 17, 2016
@author: heikok
-"""
+'''
import re
import sys
import unittest
@@ -36,47 +36,48 @@ def __init__(self, jobid):
class SGEQueue(Queue):
+
def __init__(self):
super().__init__()
def submit_command(self, jobscript, args):
- """return the submit command, e.g. qsub or sbatch
+ '''return the submit command, e.g. qsub or sbatch
Keyword arguments:
jobscript -- the jobscript to submit
args -- arguments to the jobscript
Returns: the complete command as tuple (program, args), e.g. (qsub, [jobscript arg1 arg2 arg3]
- """
+ '''
jobargs = [jobscript]
jobargs.extend(args)
return ("qsub", jobargs)
def parse_submit(self, command_output, command_error, returncode):
- """parse the output from the job-submission and return a QJob object"""
- if returncode == 0:
+ '''parse the output from the job-submission and return a QJob object'''
+ if (returncode == 0):
jobid = self._parse_int(command_output.strip())
- assert jobid != 0
+ assert(jobid != 0)
return SGEQJob("{:d}".format(jobid))
else:
- print(
- "qsub failed with code {}: {}".format(returncode, command_error),
- file=sys.stderr,
- )
+ print("qsub failed with code {}: {}".format(returncode, command_error),
+ file=sys.stderr)
return None
+
def status_command(self, qJob):
- """return the status command for the QJob"""
- assert isinstance(qJob, SGEQJob)
+ '''return the status command for the QJob'''
+ assert(isinstance(qJob, SGEQJob))
return ("qstat", [])
def delete_command(self, qJob):
- """return the delete command for the QJob"""
- assert isinstance(qJob, SGEQJob)
+ '''return the delete command for the QJob'''
+ assert(isinstance(qJob, SGEQJob))
return ("qdel", ["{}".format(qJob.jobid)])
+
def _parse_int(self, string):
- m = re.search(r"Your job (\d+)", string)
+ m = re.search(r'Your job (\d+)', string)
if m:
return int(m.group(1))
return 0
@@ -85,60 +86,55 @@ def _pure_parse_status(self, qJob, status_output):
for s in status_output.splitlines():
fields = s.split()
if len(fields) >= 5 and fields[0] == qJob.jobid:
- if re.match(r"(q|qw|hqw|hRwq)", fields[4]):
+ if re.match(r'(q|qw|hqw|hRwq)', fields[4]):
return QJobStatus.queued
- elif re.match(r"(r|t|Rr|Rt)", fields[4]):
+ elif re.match(r'(r|t|Rr|Rt)', fields[4]):
return QJobStatus.running
else:
return QJobStatus.finished
return QJobStatus.finished
+
def parse_status(self, qJob, status_output, status_err, returncode):
- """return the QJobStatus the QJob, except for testing for the status-file"""
+ '''return the QJobStatus the QJob, except for testing for the status-file'''
- assert isinstance(qJob, SGEQJob)
+ assert(isinstance(qJob, SGEQJob))
if returncode == 0:
return self._pure_parse_status(qJob, status_output)
else:
return QJobStatus.unknown
+
class TestSGEQueue(unittest.TestCase):
+
def setUp(self):
super().setUp()
self.queue = SGEQueue()
self.jobid = "8133836"
+
def test_parse_status(self):
- status_output = """
+ status_output = '''
job-ID prior name user state submit/start at queue slots ja-task-ID
-----------------------------------------------------------------------------------------------------------------
8133835 15.50032 SAsh010430 heikok r 08/01/2017 07:15:04 ded-parallelx.q@c6220ii-bvz1zz 1
8133836 15.50032 test.sh heikok r 08/01/2017 07:15:04 ded-parallelx.q@c6220ii-bvz1zz 1
8133837 15.50032 SAsh010345 heikok r 08/01/2017 07:15:04 ded-parallelx.q@c6220ii-bvz1zz 1
8089930 15.50000 osisaf_log steinare Eqw 07/29/2017 18:30:02 1
-"""
+'''
qJob = SGEQJob("8089930")
- self.assertEqual(
- self.queue.parse_status(qJob, status_output, "", 0),
- QJobStatus.finished,
- "parsing returncode",
- )
+ self.assertEqual(self.queue.parse_status(qJob, status_output, "", 0),
+ QJobStatus.finished, "parsing returncode")
qJob = SGEQJob(self.jobid)
- self.assertEqual(
- self.queue.parse_status(qJob, status_output, "", 0),
- QJobStatus.running,
- "parsing output",
- )
+ self.assertEqual(self.queue.parse_status(qJob, status_output, "", 0),
+ QJobStatus.running, "parsing output")
def test_parse_submit(self):
- command_output = """Your job 8133836 ("test.sh") has been submitted
-"""
- self.assertEqual(
- self.queue.parse_submit(command_output, "", 0).jobid,
- self.jobid,
- "parsing qsub command",
- )
+ command_output = '''Your job 8133836 ("test.sh") has been submitted
+'''
+ self.assertEqual(self.queue.parse_submit(command_output, "", 0).jobid,
+ self.jobid, "parsing qsub command")
if __name__ == "__main__":
diff --git a/utils/SnapPy/METNO/SLURMQueue.py b/utils/SnapPy/METNO/SLURMQueue.py
index e495ee49..4f0810e9 100644
--- a/utils/SnapPy/METNO/SLURMQueue.py
+++ b/utils/SnapPy/METNO/SLURMQueue.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 17, 2016
@author: heikok
-"""
+'''
import re
import sys
import unittest
@@ -36,47 +36,48 @@ def __init__(self, jobid):
class SLURMQueue(Queue):
+
def __init__(self):
super().__init__()
def submit_command(self, jobscript, args):
- """return the submit command, e.g. qsub or sbatch
+ '''return the submit command, e.g. qsub or sbatch
Keyword arguments:
jobscript -- the jobscript to submit
args -- arguments to the jobscript
Returns: the complete command as tuple (program, args), e.g. (qsub, [jobscript arg1 arg2 arg3]
- """
+ '''
jobargs = [jobscript]
jobargs.extend(args)
return ("sbatch", jobargs)
def parse_submit(self, command_output, command_error, returncode):
- """parse the output from the job-submission and return a QJob object"""
- if returncode == 0:
+ '''parse the output from the job-submission and return a QJob object'''
+ if (returncode == 0):
fields = command_output.split()
jobid = fields[3]
return SLURMQJob(jobid)
else:
- print(
- "sbatch failed with code {}: {}".format(returncode, command_error),
- file=sys.stderr,
- )
+ print("sbatch failed with code {}: {}".format(returncode, command_error),
+ file=sys.stderr)
return None
+
def status_command(self, qJob):
- """return the status command for the QJob"""
- assert isinstance(qJob, SLURMQJob)
- return ("squeue", ["-j", "{}".format(qJob.jobid)])
+ '''return the status command for the QJob'''
+ assert(isinstance(qJob, SLURMQJob))
+ return ("squeue", ['-j', "{}".format(qJob.jobid)])
def delete_command(self, qJob):
- """return the delete command for the QJob"""
- assert isinstance(qJob, SLURMQJob)
+ '''return the delete command for the QJob'''
+ assert(isinstance(qJob, SLURMQJob))
return ("scancel", ["{}".format(qJob.jobid)])
+
def _parse_int(self, string):
- m = re.search(r"(\d+)", string)
+ m = re.search(r'(\d+)', string)
if m:
return int(m.group(1))
return 0
@@ -101,42 +102,40 @@ def _pure_parse_status(self, qJob, status_output):
return QJobStatus.finished
return QJobStatus.unknown
+
def parse_status(self, qJob, status_output, status_err, returncode):
- """return the QJobStatus the QJob, except for testing for the status-file"""
+ '''return the QJobStatus the QJob, except for testing for the status-file'''
- assert isinstance(qJob, SLURMQJob)
+ assert(isinstance(qJob, SLURMQJob))
if returncode == 0:
return self._pure_parse_status(qJob, status_output)
else:
return QJobStatus.unknown
+
class TestSLURMQueue(unittest.TestCase):
+
def setUp(self):
super().setUp()
self.queue = SLURMQueue()
self.jobid = "2839455"
+
def test_parse_status(self):
- status_output = """
+ status_output = '''
JOBID PARTITION NAME USER ST TIME NODES NODELIST(REASON)
2839455 frost interact cooper R 1:17:46 1 n362
-"""
+'''
qJob = SLURMQJob(self.jobid)
- self.assertEqual(
- self.queue.parse_status(qJob, status_output, "", 0),
- QJobStatus.running,
- "parsing output",
- )
+ self.assertEqual(self.queue.parse_status(qJob, status_output, "", 0),
+ QJobStatus.running, "parsing output")
def test_parse_submit(self):
- command_output = """Submitted batch job 2839455
-"""
- self.assertEqual(
- self.queue.parse_submit(command_output, "", 0).jobid,
- self.jobid,
- "parsing squeue command",
- )
+ command_output = '''Submitted batch job 2839455
+'''
+ self.assertEqual(self.queue.parse_submit(command_output, "", 0).jobid,
+ self.jobid, "parsing squeue command")
if __name__ == "__main__":
diff --git a/utils/SnapPy/METNO/SSHConnection.py b/utils/SnapPy/METNO/SSHConnection.py
index f4e3265e..594a389f 100644
--- a/utils/SnapPy/METNO/SSHConnection.py
+++ b/utils/SnapPy/METNO/SSHConnection.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 15, 2016
@author: heikok
-"""
+'''
from METNO.HPC import typed_property, Connection
import os
@@ -30,28 +30,29 @@
class SSHConnection(Connection):
- """connection via ssh
+ '''connection via ssh
besides the main options username, machine and port, the user can set special attributes
- """
+ '''
username = typed_property("username", str)
- """name of the user on the remote machine, None possible"""
+ '''name of the user on the remote machine, None possible'''
machine = typed_property("machine", str)
- """name or IP-address of the remote machine"""
+ '''name or IP-address of the remote machine'''
remote_charset = typed_property("remote_charset", str)
- """charset of stdout of the remote machine, usually utf-8"""
+ '''charset of stdout of the remote machine, usually utf-8'''
port = typed_property("port", int)
- """port to connect on the remote machine, None possible"""
+ '''port to connect on the remote machine, None possible'''
ssh_command = typed_property("ssh_command", str)
- """command to use for ssh-connections, usually just 'ssh' for the ssh command in the PATH"""
+ '''command to use for ssh-connections, usually just 'ssh' for the ssh command in the PATH'''
scp_command = typed_property("scp_command", str)
- """command to use for scp-connections, usually just 'scp' for the scp command in the PATH"""
+ '''command to use for scp-connections, usually just 'scp' for the scp command in the PATH'''
ssh_options = typed_property("ssh_options", list)
- """additional options to add to ssh"""
+ '''additional options to add to ssh'''
scp_options = typed_property("scp_options", list)
- """additional options to add to scp"""
+ '''additional options to add to scp'''
+
def __init__(self, username=None, machine="localhost", port=None):
super().__init__()
@@ -61,24 +62,11 @@ def __init__(self, username=None, machine="localhost", port=None):
self.port = port
self.ssh_command = "ssh"
self.scp_command = "scp"
- self.scp_options = [
- "-o",
- "ConnectTimeout=20",
- "-o",
- "Batchmode=yes",
- "-o",
- "StrictHostKeyChecking=no",
- "-q",
- "-p",
- ]
- self.ssh_options = [
- "-o",
- "ConnectTimeout=20",
- "-o",
- "Batchmode=yes",
- "-o",
- "StrictHostKeyChecking=no",
- ]
+ self.scp_options = ["-o", "ConnectTimeout=20", "-o", "Batchmode=yes",
+ "-o", "StrictHostKeyChecking=no",
+ "-q", "-p"]
+ self.ssh_options = ["-o", "ConnectTimeout=20", "-o", "Batchmode=yes",
+ "-o", "StrictHostKeyChecking=no"]
return
def _build_scp_args(self):
@@ -98,22 +86,19 @@ def _build_ssh_args(self):
args.append(self.machine)
return args
+
def put_files(self, files, remote_path, timeout=None):
args = self._build_scp_args()
args.extend(files)
user = ""
if self.username is not None:
- user = self.username + "@"
- args.append(
- "{user}{machine}:{path}".format(
- user=user, machine=self.machine, path=remote_path
- )
- )
+ user = self.username + '@'
+ args.append("{user}{machine}:{path}".format(user=user,
+ machine=self.machine,
+ path=remote_path))
if sys.version_info > (3, 5, 0):
- proc = subprocess.run(
- args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout
- )
+ proc = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout)
proc.check_returncode()
else:
subprocess.check_output(args, timeout=timeout)
@@ -123,21 +108,17 @@ def get_files(self, files, local_path=None, timeout=None):
args = self._build_scp_args()
user = ""
if self.username is not None:
- user = self.username + "@"
+ user = self.username + '@'
for file in files:
- args.append(
- "{user}{machine}:{path}".format(
- user=user, machine=self.machine, path=file
- )
- )
+ args.append("{user}{machine}:{path}".format(user=user,
+ machine=self.machine,
+ path=file))
if local_path is None:
local_path = "."
args.append(local_path)
if sys.version_info > (3, 5, 0):
- proc = subprocess.run(
- args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout
- )
+ proc = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout)
proc.check_returncode()
else:
subprocess.check_output(args, timeout=timeout)
@@ -147,34 +128,31 @@ def get_files(self, files, local_path=None, timeout=None):
def syscall(self, program, args, timeout=None):
ssh_args = self._build_ssh_args()
args.insert(0, program)
- args = [shlex.quote(a) for a in args]
+ args = [ shlex.quote(a) for a in args ]
# print(args)
remote_command = " ".join(args)
ssh_args.append(remote_command)
if sys.version_info > (3, 5, 0):
- proc = subprocess.run(
- ssh_args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- timeout=timeout,
- )
- return (
- proc.stdout.decode(self.remote_charset),
- proc.stderr.decode(self.remote_charset),
- proc.returncode,
- )
+ proc = subprocess.run(ssh_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=timeout)
+ return (proc.stdout.decode(self.remote_charset),
+ proc.stderr.decode(self.remote_charset),
+ proc.returncode)
else:
try:
output = subprocess.check_output(ssh_args, timeout=timeout)
- return (output.decode(self.remote_charset), "", 0)
+ return (output.decode(self.remote_charset),
+ '',
+ 0)
except subprocess.CalledProcessError as cpe:
- return (cpe.output.decode(self.remote_charset), "", cpe.returncode)
+ return (cpe.output.decode(self.remote_charset),
+ '',
+ cpe.returncode)
-class TestSSHConnection(unittest.TestCase):
- """Test for SSHConnection"""
+class TestSSHConnection(unittest.TestCase):
+ '''Test for SSHConnection'''
def setUp(self):
unittest.TestCase.setUp(self)
self.dir1 = os.path.join(os.path.dirname(__file__), "testdir1")
@@ -184,17 +162,19 @@ def setUp(self):
if not os.path.exists(self.dir2):
os.mkdir(self.dir2)
- self.files = ["file1", "file2"]
+ self.files = ['file1', 'file2']
for file in self.files:
infile = os.path.join(self.dir1, file)
if not os.path.exists(infile):
- with open(infile, "w") as ifh:
+ with open(infile, 'w') as ifh:
ifh.write("file: {name}".format(name=infile))
outfile = os.path.join(self.dir2, file)
if os.path.exists(outfile):
os.unlink(outfile)
self.conn = SSHConnection(machine="localhost")
+
+
def tearDown(self):
unittest.TestCase.tearDown(self)
for path in (self.dir1, self.dir2):
@@ -218,7 +198,8 @@ def test_get_files(self):
files_o = [os.path.join(self.dir2, x) for x in self.files]
self.conn.get_files(files_i, self.dir2, 5)
for file in files_o:
- self.assertTrue(os.path.exists(file), "file {} exists".format(file))
+ self.assertTrue(os.path.exists(file),
+ "file {} exists".format(file))
os.unlink(file)
def test_put_files(self):
diff --git a/utils/SnapPy/METNO/Stratus.py b/utils/SnapPy/METNO/Stratus.py
index 24ab3fea..09f50209 100644
--- a/utils/SnapPy/METNO/Stratus.py
+++ b/utils/SnapPy/METNO/Stratus.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 7, 2016
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,24 +31,22 @@
class Stratus(HPC):
- """
+ '''
Implementation of a HPC machine for stratus.nsc.liu.se
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
- connection = SSHConnection(
- username="metno_op", machine="stratus.nsc.liu.se", port=22
- )
+ '''
+ connection = SSHConnection(username="metno_op", machine="stratus.nsc.liu.se", port=22)
queue = SLURMQueue()
super().__init__(connection, queue)
class TestStratus(unittest.TestCase):
- """tests for stratus, only working when having an existing forecast account"""
-
+ '''tests for stratus, only working when having an existing forecast account'''
def setUp(self):
unittest.TestCase.setUp(self)
self.hpc = HPC.by_name("stratus")
@@ -59,7 +57,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -67,6 +65,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.hpc.syscall("sleep", ["5"], timeout=1)
@@ -75,16 +74,15 @@ def test_full(self):
status_file = os.path.join(self.rdir, "status")
self.hpc.syscall("rm", ["-r", self.rdir])
self.hpc.syscall("mkdir", ["-p", self.rdir])
-
+
for f in self.strangeFiles:
- with open(f, "w") as fh:
+ with open(f, 'w') as fh:
fh.write(f)
self.hpc.put_files([f], self.rdir)
self.hpc.syscall("ls", [f])
-
+
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """#! /bin/bash
+ fh.write('''#! /bin/bash
#SBATCH -A met
#SBATCH --nodes=1 --ntasks-per-node=1 --time=01:00:00
@@ -94,10 +92,7 @@ def test_full(self):
sleep 8
echo "finished" > {status}
- """.format(
- status=status_file, rdir=self.rdir
- )
- )
+ '''.format(status=status_file, rdir=self.rdir))
self.hpc.put_files([self.testFiles[0]], self.rdir)
qjob = self.hpc.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -122,6 +117,5 @@ def test_full(self):
self.hpc.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/Vilje.py b/utils/SnapPy/METNO/Vilje.py
index f1dc72ca..f8ef10d5 100644
--- a/utils/SnapPy/METNO/Vilje.py
+++ b/utils/SnapPy/METNO/Vilje.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 7, 2016
@author: heikok
-"""
+'''
import os
from subprocess import TimeoutExpired
from time import sleep
@@ -31,24 +31,22 @@
class Vilje(HPC):
- """
+ '''
Implementation of a HPC machine for vilje.notur.ntnu.no
- """
+ '''
+
def __init__(self):
- """
+ '''
Constructor
- """
- connection = SSHConnection(
- username="forecast", machine="vilje.hpc.ntnu.no", port=22
- )
+ '''
+ connection = SSHConnection(username="forecast", machine="vilje.hpc.ntnu.no", port=22)
queue = PBSQueue()
super().__init__(connection, queue)
class TestVilje(unittest.TestCase):
- """tests for vilje, only working when having an existing forecast account on vilje"""
-
+ '''tests for vilje, only working when having an existing forecast account on vilje'''
def setUp(self):
unittest.TestCase.setUp(self)
self.vilje = HPC.by_name("vilje")
@@ -58,7 +56,7 @@ def setUp(self):
def tearDown(self):
unittest.TestCase.tearDown(self)
for f in self.testFiles:
- if os.path.exists(f):
+ if (os.path.exists(f)):
os.unlink(f)
def test_connect(self):
@@ -66,6 +64,7 @@ def test_connect(self):
self.assertEqual(retval, 0, "command succeeded")
self.assertEqual(int(out), 5, "command output correct")
+
def test_timeout(self):
with self.assertRaises(TimeoutExpired):
self.vilje.syscall("sleep", ["5"], timeout=1)
@@ -75,8 +74,7 @@ def test_full(self):
self.vilje.syscall("rm", ["-r", self.rdir])
self.vilje.syscall("mkdir", [self.rdir])
with open(self.testFiles[0], "w") as fh:
- fh.write(
- """
+ fh.write('''
#! /bin/bash
#PBS -l select=1:ncpus=1:mpiprocs=1:mem=1GB
#PBS -lwalltime=00:00:10
@@ -86,10 +84,7 @@ def test_full(self):
sleep 8
echo "finished" > {}
- """.format(
- status_file
- )
- )
+ '''.format(status_file))
self.vilje.put_files([self.testFiles[0]], self.rdir)
qjob = self.vilje.submit_job(os.path.join(self.rdir, self.testFiles[0]), [])
self.assertIsNotNone(qjob, "job submitted")
@@ -113,6 +108,5 @@ def test_full(self):
self.vilje.syscall("rm", ["-r", self.rdir])
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/SnapPy/METNO/__init__.py b/utils/SnapPy/METNO/__init__.py
index 11f2921d..678d4f39 100644
--- a/utils/SnapPy/METNO/__init__.py
+++ b/utils/SnapPy/METNO/__init__.py
@@ -1,17 +1,17 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
diff --git a/utils/SnapPy/Snappy/AddBombIsotopes.py b/utils/SnapPy/Snappy/AddBombIsotopes.py
index 282a9b7e..2ae6ac00 100755
--- a/utils/SnapPy/Snappy/AddBombIsotopes.py
+++ b/utils/SnapPy/Snappy/AddBombIsotopes.py
@@ -1,42 +1,32 @@
#! /usr/bin/env python3
-"""
+'''
Convert aerosols in a SNAP file to isotopes using the fractional distribution from Tovdal (2002)
-"""
+'''
import netCDF4
-from Snappy.BombIsotopeFractions import BombIsotopeFractions
+from Snappy.BombIsotopeFractions import BombIsotopeFractions
def snap_add_bomb_isotopes(nc: netCDF4.Dataset):
- """
+ '''
ncfile: a netcdf-file with Aerosols opened in 'a'-mode
- """
+ '''
bomb_isotopes = BombIsotopeFractions()
aerosols = []
for var in nc.variables:
- if var.startswith("Aerosol") and var.endswith("acc_concentration"):
+ if var.startswith('Aerosol') and var.endswith('acc_concentration'):
aerosols.append(var[:-18])
isos = bomb_isotopes.isotopes()
- hours = nc["time"][:] # snap writes usually hours since start
- for var in [
- "concentration",
- "acc_dry_deposition",
- "acc_wet_deposition",
- "acc_concentration",
- ]:
+ hours = nc['time'][:] # snap writes usually hours since start
+ for var in ['concentration', 'acc_dry_deposition', 'acc_wet_deposition', 'acc_concentration']:
basevar = nc[f"{aerosols[0]}_{var}"]
for iso in isos:
# declare variables
name = f"{iso}_{var}"
if name not in nc.variables:
- nc.createVariable(
- name,
- basevar.datatype,
- basevar.dimensions,
- zlib=True,
- chunksizes=basevar.chunking(),
- )
+ nc.createVariable(name, basevar.datatype, basevar.dimensions, zlib=True,
+ chunksizes=basevar.chunking())
for attr in basevar.ncattrs():
nc[name].setncattr(attr, basevar.getncattr(attr))
laststepdata = 0
@@ -44,32 +34,26 @@ def snap_add_bomb_isotopes(nc: netCDF4.Dataset):
# convert data
basedata = 0
for aero in aerosols:
- basedata += nc[f"{aero}_{var}"][t, :]
+ basedata += nc[f"{aero}_{var}"][t,:]
for iso in isos:
name = f"{iso}_{var}"
frac = bomb_isotopes.fraction(iso, hr)
- if (var == "acc_concentration") and t > 1:
+ if (var == 'acc_concentration') and t > 1:
# no decay in dose-equivalent
- nc[name][t, :] = (
- nc[name][t - 1, :] + (basedata - laststepdata) * frac
- )
+ nc[name][t,:] = nc[name][t-1,:] + (basedata-laststepdata)*frac
else:
- nc[name][t, :] = frac * basedata
- laststepdata = basedata
+ nc[name][t,:] = frac*basedata
+ laststepdata = basedata
def main():
import argparse
-
- parser = argparse.ArgumentParser(
- description="add isotope distribution to a snap.nc with bomb-aerosols"
- )
+ parser = argparse.ArgumentParser(description="add isotope distribution to a snap.nc with bomb-aerosols")
parser.add_argument("--nc", help="snap.nc filename", required=True)
-
+
args = parser.parse_args()
- with netCDF4.Dataset(args.nc, "a"):
+ with netCDF4.Dataset(args.nc, 'a') as nc:
snap_add_bomb_isotopes(args.nc)
-
if __name__ == "__main__":
- main()
+ main()
diff --git a/utils/SnapPy/Snappy/AddToa.py b/utils/SnapPy/Snappy/AddToa.py
index 4fcc33bf..8f5ae58f 100755
--- a/utils/SnapPy/Snappy/AddToa.py
+++ b/utils/SnapPy/Snappy/AddToa.py
@@ -43,10 +43,10 @@ def add_toa_to_nc(nc: netCDF4.Dataset):
raise Exception(
f"no variable with *_acc_concentration found in file: {nc.filepath()}"
)
- th = 0.0001 # low threshold
+ th = 0.0001 # low threshold
# arrived: data >= th
# flying: data < th
- data = numpy.where(data >= th, 0.0, timeDelta)
+ data = numpy.where(data >= th, 0., timeDelta)
# print(data.dtype) must be float!
toa = numpy.sum(data, axis=0)
toa[toa > timeMax] = -999
diff --git a/utils/SnapPy/Snappy/BombIsotopeFractions.py b/utils/SnapPy/Snappy/BombIsotopeFractions.py
index 50bf00ed..ab148701 100644
--- a/utils/SnapPy/Snappy/BombIsotopeFractions.py
+++ b/utils/SnapPy/Snappy/BombIsotopeFractions.py
@@ -1,57 +1,49 @@
import os
import csv
-
+import re
class BombIsotopeFractions:
# fractions{isotope} = {0: frac0, 10: frac10, 20: frac20} with 0-40 in hours and frac0-frac40 fractions of total fission-products in Bq
_fractions = None
- _timesteps = [0, 10, 20, 30, 40]
-
+ _timesteps = [0,10,20,30,40]
def __new__(cls):
- """BombIsotopeFractions singleton data"""
+ '''BombIsotopeFractions singleton data'''
if cls._fractions is None:
cls._fractions = dict()
directory = os.path.join(os.path.dirname(__file__), "resources")
with open(
- os.path.join(directory, "bomb-isotope-distribution_Tovedal.csv"),
- mode="r",
- encoding="UTF-8",
- newline="",
+ os.path.join(directory, "bomb-isotope-distribution_Tovedal.csv"), mode="r", encoding="UTF-8", newline=''
) as fh:
- csvreader = csv.reader(fh, delimiter=",")
+ csvreader = csv.reader(fh, delimiter=',')
for i in range(2):
next(csvreader)
header = next(csvreader)
offset = 9
- for i, hrs in enumerate(cls._timesteps):
- if f"t={hrs}" not in header[offset + i]:
- raise Exception(
- f"error in header for hour {hrs}: {header[offset+i]}"
- )
+ for i,hrs in enumerate(cls._timesteps):
+ if f"t={hrs}" not in header[offset+i]:
+ raise Exception(f"error in header for hour {hrs}: {header[offset+i]}")
for row in csvreader:
- if "-" in row[0]:
- isotope = row[0].replace(
- "-", ""
- ) # without - as ususal in snappy
+ if '-' in row[0]:
+ isotope = row[0].replace('-', '') # without - as ususal in snappy
stepfraction = {}
- for i, hrs in enumerate(cls._timesteps):
- stepfraction[hrs] = float(row[offset + i]) / 100.0
+ for i,hrs in enumerate(cls._timesteps):
+ stepfraction[hrs] = float(row[offset+i])/100.
cls._fractions[isotope] = stepfraction
obj = object.__new__(cls)
return obj
-
+
def isotopes(self):
- """
- list over isotopes as ['Cs137', 'Cs134', ...]
- """
+ '''
+ list over isotopes as ['Cs137', 'Cs134', ...]
+ '''
return self._fractions.keys()
-
+
def fraction(self, isotope: str, hrs: int) -> float:
- """
- @param isotope is a isotope name like Cs137 or Cs-137
- @param hrs since bomb, intra/extrapolated
- return a fraction of the total activity
- """
+ '''
+ @param isotope is a isotope name like Cs137 or Cs-137
+ @param hrs since bomb, intra/extrapolated
+ return a fraction of the total activity
+ '''
isotope = isotope.replace("-", "")
stepfracs = self._fractions[isotope]
if hrs < 0:
@@ -63,22 +55,22 @@ def fraction(self, isotope: str, hrs: int) -> float:
for i, nhr in enumerate(self._timesteps):
if nhr >= hrs:
- phr = self._timesteps[i - 1]
- hfrac = (hrs - phr) / (nhr - phr)
+ phr = self._timesteps[i-1]
+ hfrac = (hrs-phr)/(nhr-phr)
nfrac = stepfracs[nhr]
pfrac = stepfracs[phr]
- frac = pfrac + hfrac * (nfrac - pfrac)
+ frac = pfrac + hfrac*(nfrac-pfrac)
return frac
if __name__ == "__main__":
bfracs = BombIsotopeFractions()
- assert "Cs137" in bfracs.isotopes()
- assert bfracs.fraction("Cs137", 0) == 0.0002 / 100
- assert len(bfracs.isotopes()) > 10
- for hr in range(0, 48):
+ assert('Cs137' in bfracs.isotopes())
+ assert(bfracs.fraction('Cs137',0) == 0.0002/100)
+ assert(len(bfracs.isotopes()) > 10)
+ for hr in range(0,48):
tot = 0
for iso in bfracs.isotopes():
tot += bfracs.fraction(iso, hr)
- assert tot > 0.99
- assert tot < 1.01
+ assert(tot > .99)
+ assert(tot < 1.01)
diff --git a/utils/SnapPy/Snappy/BrowserWidget.py b/utils/SnapPy/Snappy/BrowserWidget.py
index 8a11a888..abf0e065 100644
--- a/utils/SnapPy/Snappy/BrowserWidget.py
+++ b/utils/SnapPy/Snappy/BrowserWidget.py
@@ -1,52 +1,52 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
"""
********************* BrowserWidget ************************
- A simple Web Browser as interface-tab using javascript callbacks
+ A simpel Web Browser as interface-tab using javascript callbacks
to get the information back to python
*******************************************************************
"""
-# Get Qt version
+#Get Qt version
from PyQt5 import Qt
+qt_version = [ int(v) for v in Qt.QT_VERSION_STR.split('.') ]
-import sys
-
-qt_version = [int(v) for v in Qt.QT_VERSION_STR.split(".")]
-
-# Import correct parts of Qt
-# if (qt_version[0] >= 5 and qt_version[1] >= 6):
+#Import correct parts of Qt
+#if (qt_version[0] >= 5 and qt_version[1] >= 6):
# QtWebEngineWidgets broken under bionic:
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=882805 (fixed Dec. 2019)
-if False:
- # Qt5.6 and later - QtWebKitWidgets is deprecated
+if (False):
+ #Qt5.6 and later - QtWebKitWidgets is deprecated
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtWebEngineWidgets import QWebEnginePage as QWebPage
from PyQt5.QtWebEngineWidgets import QWebEngineView as QWebView
-elif qt_version[0] >= 5:
- # Qt5.5 and earlier
+elif (qt_version[0] >= 5):
+ #Qt5.5 and earlier
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtWebKitWidgets import QWebPage, QWebView
else:
raise ImportError("Unsupported version of PyQt")
+
+from builtins import str
+import sys
class StartWebPage(QWebPage):
@@ -61,15 +61,14 @@ def acceptNavigationRequest(self, frame, req, nav_type):
self.formSubmitted.emit(req.url())
return False
else:
- return super(StartWebPage, self).acceptNavigationRequest(
- frame, req, nav_type
- )
+ return super(StartWebPage, self).acceptNavigationRequest(frame, req, nav_type)
class BrowserWidget(QtWidgets.QWidget):
+
def __init__(self):
"""
- Create main window with browser and a button
+ Create main window with browser and a button
"""
super(BrowserWidget, self).__init__()
@@ -83,9 +82,9 @@ def __init__(self):
self.set_form_handler(self._default_form_handler)
- # self.default_url = "https://dokit.met.no/fou/kl/prosjekter/eemep/esnap_userdoc"
- # self.tb_url.setText(self.default_url)
- # self.browse()
+ #self.default_url = "https://dokit.met.no/fou/kl/prosjekter/eemep/esnap_userdoc"
+ #self.tb_url.setText(self.default_url)
+ #self.browse()
def browse(self):
"""browse an url"""
@@ -94,11 +93,12 @@ def browse(self):
self.webview.load(QtCore.QUrl(url))
self.webview.show()
+
def url_changed(self, url):
- """Triggered when the url is changed"""
+ """ Triggered when the url is changed """
def set_html(self, text: str):
- """set html string"""
+ """ set html string"""
self.web_page = StartWebPage()
self.webview.setPage(self.web_page)
self.webview.page().formSubmitted.connect(self._handle_formSubmitted)
@@ -110,23 +110,25 @@ def _default_form_handler(dict):
print(str.format("{0} => {1}", key, value))
def set_form_handler(self, handler):
- """the form handler should accept a dictionary with query results as input"""
+ """ the form handler should accept a dictionary with query results as input """
self.form_handler = handler
def evaluate_javaScript(self, jscript):
- self.webview.page().mainFrame().evaluateJavaScript(jscript)
+ self.webview.page().mainFrame().evaluateJavaScript(jscript);
def _handle_formSubmitted(self, url):
# I don't manage to get the right query strings from the web-page
- print("handleFromSubmitted:" + url.toString())
- self.form_handler(QtCore.QUrlQuery(url).queryItems(QtCore.QUrl.FullyDecoded))
+ print("handleFromSubmitted:"+url.toString());
+ self.form_handler(QtCore.QUrlQuery(url).queryItems(QtCore.QUrl.FullyDecoded));
+
+
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
tab1 = BrowserWidget()
- html = """
+ html = '''
-"""
+'''
tab1.set_html(html)
tabs = QtWidgets.QTabWidget()
- tabs.addTab(tab1, "Test")
- tabs.resize(960, 1024)
+ tabs.addTab(tab1, 'Test')
+ tabs.resize(960,1024)
tabs.show()
sys.exit(app.exec_())
diff --git a/utils/SnapPy/Snappy/Countries.py b/utils/SnapPy/Snappy/Countries.py
index 5a9dc3d0..3bea9544 100644
--- a/utils/SnapPy/Snappy/Countries.py
+++ b/utils/SnapPy/Snappy/Countries.py
@@ -1,17 +1,17 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
@@ -21,22 +21,22 @@
def get_country_list(name):
- """
+ '''
get a list of countries for a region. Currently implemented regions: europe
- """
+ '''
filename = os.path.join(os.path.dirname(__file__), "resources", f"{name}.csv")
countries = []
- point = re.compile(r"POINT\s*\((.*)\)")
- with open(filename, newline="") as fh:
+ point = re.compile(r'POINT\s*\((.*)\)')
+ with open(filename, newline='') as fh:
reader = csv.DictReader(fh)
for row in reader:
- if row["id"].startswith("#"):
+ if row['id'].startswith('#'):
continue
- m = re.search(point, row["point"])
+ m = re.search(point, row['point'])
if m:
lon, lat = m[1].split(sep=" ")
- countries.append(Country(row["name"], float(lat), float(lon)))
+ countries.append(Country(row['name'], float(lat), float(lon)))
return countries
@@ -45,27 +45,22 @@ def __init__(self, name, lat: float, lon: float) -> None:
self.name = name
self.lat = lat
self.lon = lon
-
def __str__(self) -> str:
return ",".join([self.name, f"POINT({self.lon:.3f} {self.lat:.3f})"])
-
def __repr__(self) -> str:
return f"Country({self})"
-
def _to_diana_txt_file(filename, name):
- with open(filename, "wt") as fh:
- fh.write(
- """# -*- coding: utf-8 -*-
+ with open(filename, 'wt') as fh:
+ fh.write('''# -*- coding: utf-8 -*-
[COLUMNS
Lon:r Lat:r Name:s ]
[DATA]
-"""
- )
+''')
for cc in get_country_list(name):
- fh.write(f'{cc.lon} {cc.lat} "{cc.name}"\n')
+ fh.write(f"{cc.lon} {cc.lat} \"{cc.name}\"\n")
-if __name__ == "__main__":
- print(get_country_list("europe"))
+if __name__ == '__main__':
+ print(get_country_list('europe'))
diff --git a/utils/SnapPy/Snappy/EEMEP/Controller.py b/utils/SnapPy/Snappy/EEMEP/Controller.py
index a8824c48..98b70a14 100644
--- a/utils/SnapPy/Snappy/EEMEP/Controller.py
+++ b/utils/SnapPy/Snappy/EEMEP/Controller.py
@@ -15,13 +15,14 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Aug 9, 2016
@author: heikok
-"""
+'''
from PyQt5 import QtWidgets
+from collections import deque
import datetime
import getpass
import json
@@ -30,23 +31,21 @@
import pwd
import re
import sys
+from time import gmtime, strftime
import traceback
-from PyQt5.QtCore import (
- QThread,
- pyqtSignal,
-)
+from PyQt5.QtCore import QProcess, QProcessEnvironment, QThread, QIODevice, QThreadPool, pyqtSignal
from Snappy.BrowserWidget import BrowserWidget
from Snappy.EEMEP.Resources import Resources
from Snappy.EEMEP.ModelRunner import ModelRunner
import Snappy.Utils
-
def getFileOwner(filename):
pwuid = pwd.getpwuid(os.stat(filename).st_uid)
return pwuid.pw_name, pwuid.pw_gecos
+
def debug(*objs):
print("DEBUG: ", *objs, file=sys.stderr)
@@ -68,19 +67,22 @@ def run(self):
debug("running")
self.update_log_signal.emit()
self.sleep(3)
- except Exception:
+ except:
traceback.print_exc()
-class Controller:
- """
+
+
+class Controller():
+ '''
Controller for EEMEP Widget. Starts the browserwidget as self.main and connects it to the form handler
- """
+ '''
+
def __init__(self):
- """
+ '''
Initialize Widget and handlers
- """
+ '''
self.res = Resources()
self.main = BrowserWidget()
self.main.set_html(self.res.getStartScreen())
@@ -94,112 +96,86 @@ def __init__(self):
self.lastLog = []
self.logfile_size = 0
- def write_log(self, txt: str, max_lines=30, clear_log=False):
- if clear_log:
+
+ def write_log(self, txt:str, max_lines=30, clear_log=False):
+ if (clear_log):
self.lastLog = [txt]
else:
self.lastLog += txt.splitlines()
debug(txt)
- # Write at most 30 lines to screen
- if len(self.lastLog) > max_lines:
+ #Write at most 30 lines to screen
+ if (len(self.lastLog) > max_lines):
self.lastLog = self.lastLog[-max_lines:]
+ lines = None
- self.main.evaluate_javaScript(
- "updateEemepLog({0});".format(json.dumps("\n".join(self.lastLog)))
- )
+ self.main.evaluate_javaScript('updateEemepLog({0});'.format(json.dumps("\n".join(self.lastLog))))
def update_log_query(self, qDict):
- # MainBrowserWindow._default_form_handler(qDict)
- # self.write_log("updating...")
+ #MainBrowserWindow._default_form_handler(qDict)
+ #self.write_log("updating...")
if os.path.isfile(self.volcano_logfile):
current_size = os.path.getsize(self.volcano_logfile)
# Log overwritten - new file (this should not happen)
- if current_size < self.logfile_size:
- self.write_log(
- "WARNING: Logfile overwritten - someone else is running this volcano also"
- )
+ if (current_size < self.logfile_size):
+ self.write_log("WARNING: Logfile overwritten - someone else is running this volcano also")
self.logfile_size = 0
# If new content in logfile
- if current_size > self.logfile_size:
+ if (current_size > self.logfile_size):
with open(self.volcano_logfile) as lf:
lf.seek(self.logfile_size)
for line in lf:
self.write_log(line)
self.logfile_size = current_size
else:
- if os.path.isfile(self.volcano_file):
- self.write_log(
- "Queue busy {:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now())
- )
- if self.res.getModelRunnerLogs():
+ if (os.path.isfile(self.volcano_file)):
+ self.write_log("Queue busy {:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()))
+ if (self.res.getModelRunnerLogs()):
self.write_log(self.res.getModelRunnerLogs())
else:
- # Check if volcano logfile exists after waiting slightly
+ #Check if volcano logfile exists after waiting slightly
# (to avoid race conditions in which volcano.xml is deleted before logfile is created)
time.sleep(1.0)
- if os.path.isfile(self.volcano_logfile):
- self.write_log(
- "ERROR: Neither '{:s}' \nnor '{:s}' Exists!\nSomeone may have deleted the run.".format(
- self.volcano_file, self.volcano_logfile
- )
- )
+ if (os.path.isfile(self.volcano_logfile)):
+ self.write_log("ERROR: Neither '{:s}' \nnor '{:s}' Exists!\nSomeone may have deleted the run.".format(self.volcano_file, self.volcano_logfile))
self.eemepRunning = "inactive"
def cancel_first_in_queue(self, qDict):
- """Mark all currently active model-runs for abort"""
+ '''Mark all currently active model-runs for abort'''
for dirpath, dirs, files in os.walk(self.res.getOutputDir()):
for file in files:
if file == ModelRunner.ABORT_FILENAME:
try:
self.write_log("trying to abort {}".format(dirpath))
- abortLogFile = (
- datetime.datetime.now()
- .strftime("{fname}_%Y%m%d-%H%M%S")
- .format(fname=ModelRunner.ABORT_FILENAME)
- )
- with open(os.path.join(dirpath, abortLogFile), "wt") as lh:
+ abortLogFile = datetime.datetime.now().strftime('{fname}_%Y%m%d-%H%M%S').format(fname=ModelRunner.ABORT_FILENAME)
+ with open(os.path.join(dirpath, abortLogFile), 'wt') as lh:
lh.write("aborted by {}".format(getpass.getuser()))
os.remove(os.path.join(dirpath, file))
- except Exception:
+ except:
traceback.print_exc()
self.write_log("aborting {} failed!".format(dirpath))
pass
def cancel_submitted(self, qDict):
- """Cancel the last submitted volcano-file"""
- if os.path.isfile(self.volcano_file):
+ '''Cancel the last submitted volcano-file'''
+ if (os.path.isfile(self.volcano_file)):
owner, gecos = getFileOwner(self.volcano_file)
user = getpass.getuser()
- debug(
- "Deleting {:s} owned by {:s} ({:s}) with user {:s}".format(
- self.volcano_file, owner, gecos, user
- )
- )
- if owner != user:
- self.write_log(
- "WARNING: {:s}\nwas started by {:s} ({:s}). Please notify this user that you canceled the run!".format(
- self.volcano_file, owner, gecos
- )
- )
+ debug("Deleting {:s} owned by {:s} ({:s}) with user {:s}".format(self.volcano_file, owner, gecos, user))
+ if (owner != user):
+ self.write_log("WARNING: {:s}\nwas started by {:s} ({:s}). Please notify this user that you canceled the run!".format(self.volcano_file, owner, gecos))
try:
os.remove(os.path.join(self.volcano_file))
self.write_log("{} deleted".format(self.volcano_file))
self.eemepRunning = "inactive"
except Exception as e:
- self.write_log(
- "ERROR: could not cancel the currently submitted volcano!\n Error was {:s}".format(
- e.msg
- )
- )
+ self.write_log("ERROR: could not cancel the currently submitted volcano!\n Error was {:s}".format(e.msg))
pass
else:
- self.write_log(
- "Volcano file ('{:s}') does not exist".format(self.volcano_file)
- )
+ self.write_log("Volcano file ('{:s}') does not exist".format(self.volcano_file))
def update_log(self):
self.update_log_query({})
@@ -207,11 +183,10 @@ def update_log(self):
def _create_form_handler(self):
def handler(queryDict):
"""a form-handler with closure for self"""
- options = {
- "Run": self.run_eemep_query,
- "Update": self.update_log_query,
- "Cancel+active": self.cancel_first_in_queue,
- "Cancel+submitted": self.cancel_submitted,
+ options = { 'Run' : self.run_eemep_query,
+ 'Update' : self.update_log_query,
+ 'Cancel+active': self.cancel_first_in_queue,
+ 'Cancel+submitted': self.cancel_submitted
}
# mapping from QList to simple dictionary
qDict = dict()
@@ -219,19 +194,14 @@ def handler(queryDict):
qDict[key] = value
# calling the correct handler depending on the module
try:
- options[qDict["action"]](qDict)
+ options[qDict['action']](qDict)
except TypeError as ex:
self.write_log("type-error: {}".format(ex))
except ValueError as ex:
self.write_log("value-error: {}".format(ex))
- except Exception:
- self.write_log(
- "Unexpected error on {0}: {1}".format(
- qDict["action"], sys.exc_info()[0]
- )
- )
+ except:
+ self.write_log("Unexpected error on {0}: {1}".format(qDict['action'],sys.exc_info()[0]))
raise
-
return handler
def run_eemep_query(self, qDict):
@@ -242,165 +212,124 @@ def run_eemep_query(self, qDict):
for key, value in qDict.items():
print(str.format("{0} => {1}", key, value))
errors = ""
- match = re.search(
- r"(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2})", qDict["startTime"]
- )
+ match = re.search(r'(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2})', qDict['startTime'])
if match:
- startTime = "{0} {1} {2} {3}".format(*match.group(1, 2, 3, 4))
- startDT = datetime.datetime(*tuple(map(int, list(match.group(1, 2, 3, 4)))))
- modelStartDT = datetime.datetime(
- startDT.year, startDT.month, startDT.day, 0, 0, 0
- )
+ startTime = "{0} {1} {2} {3}".format(*match.group(1,2,3,4))
+ startDT = datetime.datetime(*tuple(map(int, list(match.group(1,2,3,4)))))
+ modelStartDT = datetime.datetime(startDT.year, startDT.month, startDT.day, 0, 0, 0)
else:
- errors += "Cannot interpret startTime: {0}\n".format(qDict["startTime"])
+ errors += "Cannot interpret startTime: {0}\n".format(qDict['startTime'])
try:
- runTime = int(qDict["runTime"])
- except Exception:
- errors += "Cannot interpret runTime: {}\n".format(qDict["runTime"])
+ runTime = int(qDict['runTime'])
+ except:
+ errors += "Cannot interpret runTime: {}\n".format(qDict['runTime'])
restart = "false"
- if "restart_file" in qDict and qDict["restart_file"].lower() == "true":
- restart = "restart"
+ if ('restart_file' in qDict and qDict['restart_file'].lower() == 'true'):
+ restart = 'restart'
- if qDict["volcanotype"] == "default":
- type = "M0"
+ if qDict['volcanotype'] == 'default':
+ type = 'M0'
else:
- type = qDict["volcanotype"]
+ type = qDict['volcanotype']
volcanoes = self.res.readVolcanoes()
- if qDict["volcano"] and volcanoes[qDict["volcano"]]:
- qDict["volcano"]
- volcano = re.sub(r"[^\w.-_]", "_", volcanoes[qDict["volcano"]]["NAME"])
- latf = volcanoes[qDict["volcano"]]["LATITUDE"]
- lonf = volcanoes[qDict["volcano"]]["LONGITUDE"]
- altf = volcanoes[qDict["volcano"]]["ELEV"]
- if qDict["volcanotype"] == "default":
- type = volcanoes[qDict["volcano"]]["ERUPTIONTYPE"]
+ if (qDict['volcano'] and volcanoes[qDict['volcano']]):
+ tag = qDict['volcano']
+ volcano = re.sub(r'[^\w.-_]','_',volcanoes[qDict['volcano']]['NAME'])
+ latf = volcanoes[qDict['volcano']]['LATITUDE']
+ lonf = volcanoes[qDict['volcano']]['LONGITUDE']
+ altf = volcanoes[qDict['volcano']]['ELEV']
+ if qDict['volcanotype'] == 'default':
+ type = volcanoes[qDict['volcano']]['ERUPTIONTYPE']
else:
- lat = qDict["latitude"]
- lon = qDict["longitude"]
- alt = qDict["altitude"]
+ lat = qDict['latitude']
+ lon = qDict['longitude']
+ alt = qDict['altitude']
try:
latf = Snappy.Utils.parseLat(lat)
lonf = Snappy.Utils.parseLon(lon)
altf = float(alt)
- except Exception:
- latf = 0.0
- lonf = 0.0
- altf = 0.0
- errors += "Cannot interpret latitude/longitude/altitude: {0}/{1}/{2}\n".format(
- lat, lon, alt
- )
+ except:
+ latf = 0.
+ lonf = 0.
+ altf = 0.
+ errors += "Cannot interpret latitude/longitude/altitude: {0}/{1}/{2}\n".format(lat,lon,alt);
volcano = "{lat}N_{lon}E".format(lat=latf, lon=lonf)
- debug(
- "volcano: {0} {1:.2f} {2:.2f} {3} {4}".format(
- volcano, latf, lonf, altf, type
- )
- )
+ debug("volcano: {0} {1:.2f} {2:.2f} {3} {4}".format(volcano, latf, lonf, altf, type))
try:
volctype = self.res.readVolcanoType(type)
except Exception as ex:
errors += str(ex) + "\n"
- errors += "Please select Height and Type (Advanced) manually.\n"
+ errors += 'Please select Height and Type (Advanced) manually.\n'
+
+ self.write_log("working with {:s} (lat={:.2f}N lon={:.2f}E) starting at {:s}".format(volcano, latf, lonf, startTime))
- self.write_log(
- "working with {:s} (lat={:.2f}N lon={:.2f}E) starting at {:s}".format(
- volcano, latf, lonf, startTime
- )
- )
# Get cloud height if supplied and calculate eruption rate
- if qDict["cloudheight"]:
+ if qDict['cloudheight']:
try:
- cheight = float(qDict["cloudheight"])
- except Exception:
- errors += "cannot interpret cloudheight (m): {0}\n".format(
- qDict["cloudheight"]
- )
-
- if cheight % 1 != 0:
- self.write_log(
- "WARNING: Ash cloud height supplied with fraction. Please check that you supplied meters, not km!"
- )
-
- if qDict["cloudheight_datum"] == "mean_sea_level":
+ cheight = float(qDict['cloudheight'])
+ except:
+ errors += "cannot interpret cloudheight (m): {0}\n".format(qDict['cloudheight'])
+
+ if (cheight % 1 != 0):
+ self.write_log("WARNING: Ash cloud height supplied with fraction. Please check that you supplied meters, not km!")
+
+ if qDict['cloudheight_datum'] == 'mean_sea_level':
# Interpret cloud height as above sea level
# - remove volcano vent altitude to get plume height
- self.write_log(
- "Ash cloud height measured from mean sea level: {:.2f} km".format(
- cheight / 1000.0
- )
- )
+ self.write_log("Ash cloud height measured from mean sea level: {:.2f} km".format(cheight/1000.0))
cheight = cheight - altf
- elif qDict["cloudheight_datum"] == "vent":
+ elif qDict['cloudheight_datum'] == 'vent':
# Interpret cloud height as above vent
pass
else:
- errors += "cannot interpret cloud height datum: {:s}".format(
- qDict["cloudheight_datum"]
- )
+ errors += "cannot interpret cloud height datum: {:s}".format(qDict['cloudheight_datum'])
# rate in kg/s from Mastin et al. 2009, formular (1) and a volume (DRE) (m3) to
# mass (kg) density of 2500kg/m3
- rate = 2500.0 * ((0.5 * max(0, cheight) / 1000.0) ** (1.0 / 0.241))
+ rate = 2500.0 * ((0.5*max(0, cheight)/1000.0)**(1.0/0.241))
else:
- cheight = float(volctype["H"]) * 1000 # km -> m
- rate = float(volctype["dM/dt"])
-
- # Check negative ash cloud height
- if cheight <= 0:
- errors += "Negative cloud height {:.2f}! Please check ash cloud.".format(
- cheight / 1000.0
- )
- self.write_log(
- "Ash cloud height measured from volcano: {:.2f} km, rate: {:.0f} kg/s, volcano height: {:.2f} km.".format(
- cheight / 1000.0, rate, altf / 1000.0
- )
- )
+ cheight = float(volctype['H']) * 1000 # km -> m
+ rate = float(volctype['dM/dt'])
+
+ #Check negative ash cloud height
+ if (cheight <= 0):
+ errors += "Negative cloud height {:.2f}! Please check ash cloud.".format(cheight/1000.0)
+ self.write_log("Ash cloud height measured from volcano: {:.2f} km, rate: {:.0f} kg/s, volcano height: {:.2f} km.".format(cheight/1000.0, rate, altf/1000.0))
# Abort if errors
- if len(errors) > 0:
- debug('updateLog("{0}");'.format(json.dumps("ERRORS:\n" + errors)))
+ if (len(errors) > 0):
+ debug('updateLog("{0}");'.format(json.dumps("ERRORS:\n"+errors)))
self.write_log("ERRORS:\n{0}".format(errors))
return
# eEMEP runs up-to 23 km, so remove all ash above 23 km,
# See Varsling av vulkanaske i norsk luftrom - driftsfase,
# February 2020 for details
- eemep_cheight_max = 23000.0 - altf
- if cheight > eemep_cheight_max:
+ eemep_cheight_max = 23000.0-altf
+ if (cheight > eemep_cheight_max):
rate_fraction = eemep_cheight_max / cheight
- self.write_log(
- "Cropping ash cloud to {:.2f} km from {:.2f} km using factor {:.3f}".format(
- eemep_cheight_max / 1000.0, cheight / 1000.0, rate_fraction
- )
- )
+ self.write_log("Cropping ash cloud to {:.2f} km from {:.2f} km using factor {:.3f}".format(eemep_cheight_max/1000.0, cheight/1000.0, rate_fraction))
rate = rate * rate_fraction
cheight = eemep_cheight_max
eruptions = []
eruption = ''
- eruptions.append(
- eruption.format(
- start=startDT.isoformat(),
- end=(startDT + datetime.timedelta(hours=runTime)).isoformat(),
- bottom=0,
- top=cheight,
- rate=rate,
- m63=volctype["m63"],
- )
- )
-
- self.lastOutputDir = os.path.join(
- self.res.getOutputDir(), "{0}_ondemand".format(volcano)
- )
- self.volcano_file = os.path.join(
- self.lastOutputDir, ModelRunner.VOLCANO_FILENAME
- )
+ eruptions.append(eruption.format(start=startDT.isoformat(),
+ end=(startDT + datetime.timedelta(hours=runTime)).isoformat(),
+ bottom=0,
+ top=cheight,
+ rate=rate,
+ m63=volctype['m63']))
+
+ self.lastOutputDir = os.path.join(self.res.getOutputDir(), "{0}_ondemand".format(volcano))
+ self.volcano_file = os.path.join(self.lastOutputDir, ModelRunner.VOLCANO_FILENAME)
self.lastQDict = qDict
sourceTerm = """
@@ -416,44 +345,34 @@ def run_eemep_query(self, qDict):
"""
- ecModelRun = qDict["ecmodelrun"]
+ ecModelRun = qDict['ecmodelrun'];
if not ecModelRun == "best":
ecModelRun += "Z"
- self.lastSourceTerm = sourceTerm.format(
- lat=latf,
- lon=lonf,
- volcano=volcano,
- alt=altf,
- outdir=self.lastOutputDir,
- restart=restart,
- model_run=ecModelRun,
- model_start_time=modelStartDT.isoformat(),
- eruptions="\n".join(eruptions),
- runTime=runTime,
- )
+ self.lastSourceTerm = sourceTerm.format(lat=latf, lon=lonf,
+ volcano=volcano,
+ alt=altf,
+ outdir=self.lastOutputDir,
+ restart=restart,
+ model_run=ecModelRun,
+ model_start_time=modelStartDT.isoformat(),
+ eruptions="\n".join(eruptions),
+ runTime=runTime)
debug("output directory: {}".format(self.lastOutputDir))
os.makedirs(self.lastOutputDir, exist_ok=True)
- self.volcano_logfile = os.path.join(self.lastOutputDir, "volcano.log")
- if os.path.exists(self.volcano_logfile):
- logdate = datetime.datetime.fromtimestamp(
- os.path.getmtime(self.volcano_logfile)
- )
- os.rename(
- self.volcano_logfile,
- "{}_{}".format(self.volcano_logfile, logdate.strftime("%Y%m%dT%H%M%S")),
- )
+ self.volcano_logfile = os.path.join(self.lastOutputDir,"volcano.log")
+ if (os.path.exists(self.volcano_logfile)):
+ logdate = datetime.datetime.fromtimestamp(os.path.getmtime(self.volcano_logfile))
+ os.rename(self.volcano_logfile, "{}_{}".format(self.volcano_logfile, logdate.strftime("%Y%m%dT%H%M%S")))
try:
# Mode x - open for exclusive creation, failing if the file already exists
- with open(self.volcano_file, "x") as fh:
+ with open(self.volcano_file,'x') as fh:
fh.write(self.lastSourceTerm)
- except FileExistsError:
+ except FileExistsError as e:
owner = "unknown"
- if os.path.exists(self.volcano_file):
+ if (os.path.exists(self.volcano_file)):
owner, gecos = getFileOwner(self.volcano_file)
- errmsg = "ERROR: Run ({:s}) already exists!\nCreated by user {:s} ({:s}).\nPlease try again later.".format(
- self.volcano_file, owner, gecos
- )
+ errmsg = "ERROR: Run ({:s}) already exists!\nCreated by user {:s} ({:s}).\nPlease try again later.".format(self.volcano_file, owner, gecos)
debug('updateLog("{0}");'.format(json.dumps(errmsg)))
self.write_log(errmsg)
return
@@ -464,7 +383,6 @@ def run_eemep_query(self, qDict):
self.model_update.update_log_signal.connect(self.update_log)
self.model_update.start(QThread.LowPriority)
-
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
ctr = Controller()
diff --git a/utils/SnapPy/Snappy/EEMEP/ModelRunner.py b/utils/SnapPy/Snappy/EEMEP/ModelRunner.py
index 66307845..ca0ce359 100644
--- a/utils/SnapPy/Snappy/EEMEP/ModelRunner.py
+++ b/utils/SnapPy/Snappy/EEMEP/ModelRunner.py
@@ -15,13 +15,12 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+from Snappy.EcMeteorologyCalculator import EcMeteorologyCalculator
+'''
Created on Sep 2, 2016
@author: heikok
-"""
-from Snappy.EcMeteorologyCalculator import EcMeteorologyCalculator
-
+'''
from METNO.HPC import HPC, StatusFile, QJobStatus
import datetime
from netCDF4 import Dataset, num2date
@@ -42,31 +41,23 @@
import Snappy.Resources
-class AbortFile:
+class AbortFile():
"""Abort control with a filename. Abort as soon as the file disappears.
The file must be read and writable, or abort is not supported."""
-
def __init__(self, filename):
self.filename = None
if filename:
try:
if os.path.exists(filename):
- print(
- "abortfile '{}' exists, removing".format(filename),
- file=sys.stderr,
- )
+ print("abortfile '{}' exists, removing".format(filename), file=sys.stderr)
os.remove(filename)
- with open(filename, "wt") as fh:
+ with open(filename, 'wt') as fh:
fh.write("delete this file to abort processing")
self.filename = filename
except OSError:
- print(
- "cannot write filename: '{}', modelrunner abort disabled".format(
- filename
- ),
- sys.stderr,
- )
+ print("cannot write filename: '{}', modelrunner abort disabled".format(filename),
+ sys.stderr)
def abortRequested(self):
"""return TRUE if abort requested, FALSE if we should continue"""
@@ -81,11 +72,10 @@ def __del__(self):
if self.filename and os.path.exists(self.filename):
try:
os.remove(self.filename)
- except Exception:
+ except:
pass
-
-class ModelRunner:
+class ModelRunner():
VOLCANO_FILENAME = "volcano.xml"
NPP_FILENAME = "npp.xml"
@@ -95,21 +85,20 @@ class ModelRunner:
logger = None
+
@staticmethod
def getLogger(path=None):
if path is None:
if ModelRunner.logger is None:
- raise Exception(
- "getLogger() called without being initialized with path"
- )
+ raise Exception('getLogger() called without being initialized with path')
return ModelRunner.logger
ModelRunner.logger = logging.getLogger("ModelRunner")
ModelRunner.logger.setLevel(logging.DEBUG)
- fmt = logging.Formatter("%(asctime)s: %(message)s", datefmt="%Y%m%dT%H%M%SZ")
- fmt.converter = gmtime # Make sure we are using UTC time
+ fmt = logging.Formatter('%(asctime)s: %(message)s', datefmt="%Y%m%dT%H%M%SZ")
+ fmt.converter = gmtime #Make sure we are using UTC time
# logging to file
- fh = logging.FileHandler(os.path.join(path, "volcano.log"))
+ fh = logging.FileHandler(os.path.join(path, 'volcano.log'))
fh.setLevel(logging.DEBUG)
fh.setFormatter(fmt)
# errors on stderr, too (e.g. for cron)
@@ -122,9 +111,9 @@ def getLogger(path=None):
return ModelRunner.logger
def __init__(self, path, hpcMachine, npp=False):
- """
+ '''
for correct working logs, make sure to have ModelRunner.getLogger(path=...) called before initialization
- """
+ '''
self.npp = npp
self.upload_files = set()
self.timestamp = datetime.datetime.now()
@@ -135,7 +124,7 @@ def __init__(self, path, hpcMachine, npp=False):
self.hpcMachine = hpcMachine
self.inpath = path
- # Set up logging
+ #Set up logging
self.logger = self.getLogger()
self.rundir = self.res.getHPCRunDir(self.hpcMachine)
@@ -144,6 +133,7 @@ def __init__(self, path, hpcMachine, npp=False):
else:
volcano_path = os.path.join(path, ModelRunner.VOLCANO_FILENAME)
+
if not os.path.exists(volcano_path):
raise Exception("no such file or directory: {}".format(volcano_path))
@@ -156,33 +146,31 @@ def __init__(self, path, hpcMachine, npp=False):
self.path = self.volcano.outputDir
os.makedirs(name=self.path, exist_ok=True)
- self.abortRequest = AbortFile(
- os.path.join(self.inpath, ModelRunner.ABORT_FILENAME)
- )
+ self.abortRequest = AbortFile(os.path.join(self.inpath, ModelRunner.ABORT_FILENAME))
self._volcano_to_column_source()
self._get_meteo_files()
self._get_restart_file()
self._create_job_script()
def _volcano_to_column_source(self):
- """write columnsource_location.csv and columnsource_emissions.csv from volcano.xml"""
+ '''write columnsource_location.csv and columnsource_emissions.csv from volcano.xml'''
location = os.path.join(self.path, "columnsource_location.csv")
- with open(location, "wt") as lh:
+ with open(location, 'wt') as lh:
lh.write(self.volcano.get_columnsource_location())
self.upload_files.add(location)
emission = os.path.join(self.path, "columnsource_emission.csv")
- with open(emission, "wt") as eh:
+ with open(emission, 'wt') as eh:
eh.write(self.volcano.get_columnsource_emission())
self.upload_files.add(emission)
def _generate_meteo_file(self, outfile, date_files):
- """Generate a meteo file, eventually by concatenting several input-files to get a file with 8 timesteps
+ '''Generate a meteo file, eventually by concatenting several input-files to get a file with 8 timesteps
Args:
outfile: output filename
date_files: list of pairs, each pair consisting of a input-file and the number of time-steps (3hourly) containted in this timestep
- """
+ '''
if os.path.islink(outfile):
os.unlink(outfile)
if os.path.isfile(outfile):
@@ -191,7 +179,7 @@ def _generate_meteo_file(self, outfile, date_files):
elif os.access(outfile, os.R_OK):
# file only readable, forced to that file
return
- if date_files[0][1] == 8:
+ if (date_files[0][1] == 8):
# no file-concatenation needed, just create a link
if not os.path.lexists(outfile):
os.symlink(date_files[0][0], outfile)
@@ -205,8 +193,8 @@ def _generate_meteo_file(self, outfile, date_files):
continue
use_steps.append((file, newsteps, timesteps_in_file))
timesteps_in_file = timesteps_in_file + newsteps
- assert timesteps_in_file <= 8
- if timesteps_in_file == 8:
+ assert(timesteps_in_file <= 8)
+ if (timesteps_in_file == 8):
break
# create a list of all files needed (first date first) and
# find the timesteps to select from the joined files. (from first date to last date)
@@ -221,97 +209,76 @@ def _generate_meteo_file(self, outfile, date_files):
files.append(''.format(file=file))
# run fimex on files/steps
- joinfiles = """
+ joinfiles = '''
{files}
-"""
- ncml_file = os.path.join(self.path, "joinMeteo.ncml")
- with open(ncml_file, "wt") as fh:
+'''
+ ncml_file = os.path.join(self.path, 'joinMeteo.ncml')
+ with open(ncml_file, 'wt') as fh:
fh.write(joinfiles.format(files="\n".join(files)))
- subprocess.call(
- args=[
- "fimex",
- "--input.file",
- ncml_file,
- "--output.file",
- outfile,
- "--output.type=nc4",
- "--extract.pickDimension.name=time",
- "--extract.pickDimension.list={}".format(
- ",".join(str(x) for x in steps)
- ),
- ]
- )
+ subprocess.call(args=['fimex', '--input.file', ncml_file,
+ '--output.file', outfile,
+ '--output.type=nc4',
+ '--extract.pickDimension.name=time',
+ '--extract.pickDimension.list={}'.format(','.join(str(x) for x in steps))])
+
def _get_meteo_files(self):
- """Create meteorology files in the output-directory of volcano.xml.
+ '''Create meteorology files in the output-directory of volcano.xml.
This involves linking and copying of needed meteorology. and eventually
addition of a few timesteps at the beginning of the run
Returns: list of meteorology files
- """
+ '''
(ref_date, model_start_time) = self.volcano.get_meteo_dates()
sres = Snappy.Resources.Resources()
border = 7
- if (
- self.volcano.latitude > (sres.ecDefaultDomainStartY + border)
- and self.volcano.latitude
- < (sres.ecDefaultDomainStartY + sres.ecDomainHeight - border)
- and self.volcano.longitude > (sres.ecDefaultDomainStartX + border)
- and self.volcano.longitude
- < (sres.ecDefaultDomainStartX + sres.ecDomainWidth - border)
- ):
+ if (self.volcano.latitude > (sres.ecDefaultDomainStartY + border) and
+ self.volcano.latitude < (sres.ecDefaultDomainStartY + sres.ecDomainHeight - border) and
+ self.volcano.longitude > (sres.ecDefaultDomainStartX + border) and
+ self.volcano.longitude < (sres.ecDefaultDomainStartX + sres.ecDomainWidth - border)):
files = self.res.getECMeteorologyFiles(model_start_time, 72, ref_date)
else:
self.logger.debug("Calculating Meteorology, takes about 15min")
# make sure to use the 00UTC meteorology, eemep needs start-time at midnight
start_time = model_start_time.replace(hour=3)
- ecMetCalc = EcMeteorologyCalculator(
- EcMeteorologyCalculator.getGlobalMeteoResources(),
- dtime=start_time,
- domainCenterX=self.volcano.longitude,
- domainCenterY=self.volcano.latitude,
- )
+ ecMetCalc = EcMeteorologyCalculator(EcMeteorologyCalculator.getGlobalMeteoResources(),
+ dtime=start_time,
+ domainCenterX=self.volcano.longitude,
+ domainCenterY=self.volcano.latitude)
ecMetCalc.calc()
- files = [[(x, 8)] for x in ecMetCalc.get_meteorology_files()]
+ files = [[ (x, 8) ] for x in ecMetCalc.get_meteorology_files()]
self.logger.debug("Meteorology calculated")
for i, date_files in enumerate(files):
file_date = model_start_time + datetime.timedelta(days=i)
- outfile = os.path.join(
- self.path, "meteo{date}.nc".format(date=file_date.strftime("%Y%m%d"))
- )
+ outfile = os.path.join(self.path, "meteo{date}.nc".format(date=file_date.strftime("%Y%m%d")))
self._generate_meteo_file(outfile, date_files)
self.upload_files.add(outfile)
# vlevel-definition
vlevels = self.res.getVerticalLevelDefinition()
vfile = os.path.join(self.path, "Vertical_levels.txt")
- with open(vfile, "w") as vh:
+ with open(vfile, 'w') as vh:
vh.write(vlevels)
self.upload_files.add(vfile)
def _get_restart_file(self):
- if self.volcano.run_as_restart():
+ if (self.volcano.run_as_restart()):
model_start_time = self.volcano.get_meteo_dates()[1]
- restart_file = os.path.join(
- self.path,
- "EMEP_IN_{date}.nc".format(date=model_start_time.strftime("%Y%m%d")),
- )
- if os.path.exists(restart_file):
+ restart_file = os.path.join(self.path, "EMEP_IN_{date}.nc".format(date=model_start_time.strftime("%Y%m%d")))
+ if (os.path.exists(restart_file)):
self.upload_files.add(restart_file)
def _create_job_script(self):
npp_extension = "_npp" if self.npp else ""
- job = self.res.get_job_script(self.hpcMachine + npp_extension)
- defs = {
- "rundir": self.rundir,
- "runtag": self.runtag,
- } # year, month, day, hour, runhour
+ job = self.res.get_job_script(self.hpcMachine+npp_extension)
+ defs = {"rundir": self.rundir,
+ "runtag": self.runtag} # year, month, day, hour, runhour
defs["runhour"] = "{}".format(int(self.volcano.runTimeHours))
start_time = self.volcano.get_meteo_dates()[1]
defs["year"] = start_time.year
@@ -319,13 +286,11 @@ def _create_job_script(self):
defs["day"] = "{:02d}".format(start_time.day)
defs["hour"] = "{:02d}".format(start_time.hour)
- self.logger.debug(
- "Creating job script with the following definitions: {:s}".format(str(defs))
- )
+ self.logger.debug("Creating job script with the following definitions: {:s}".format(str(defs)))
filename = os.path.join(self.path, self.jobscript)
- with open(filename, "wt") as jh:
+ with open(filename, 'wt') as jh:
jh.write(job.format(**defs))
self.upload_files.add(filename)
@@ -335,21 +300,15 @@ def _write_log(self, msg):
def clean_old_files(self):
"""Delete files fromprevious runs"""
- self.logger.debug(
- "cleaning files in {}:{}".format(self.hpcMachine, self.hpc_outdir)
- )
+ self.logger.debug("cleaning files in {}:{}".format(self.hpcMachine, self.hpc_outdir))
- hpc_files_to_delete = (
- list(self.upload_files)
- + [self.statusfile]
- + [ModelRunner.OUTPUT_AVERAGE_FILENAME]
- + [ModelRunner.OUTPUT_INSTANT_FILENAME]
- )
+ hpc_files_to_delete = list(self.upload_files) \
+ + [self.statusfile] \
+ + [ModelRunner.OUTPUT_AVERAGE_FILENAME] \
+ + [ModelRunner.OUTPUT_INSTANT_FILENAME]
hpc_files_to_delete = [os.path.basename(f) for f in hpc_files_to_delete]
- hpc_files_to_delete = [
- os.path.join(self.hpc_outdir, f) for f in hpc_files_to_delete
- ]
+ hpc_files_to_delete = [os.path.join(self.hpc_outdir, f) for f in hpc_files_to_delete]
self.hpc.syscall("rm", ["-f"] + hpc_files_to_delete)
def do_upload_files(self):
@@ -362,7 +321,7 @@ def do_upload_files(self):
def get_run_file_ages(self):
"""Return age of files on HPC"""
try:
- # Get current date and date of files on HPC
+ #Get current date and date of files on HPC
hpc_date, cerr, retval = self.hpc.syscall("date", ["+%s"], timeout=30)
if retval != 0:
self.logger.error("Tried to get date, got cerr {:s}".format(cerr))
@@ -371,49 +330,42 @@ def get_run_file_ages(self):
if retval != 0:
self.logger.error("Tried to call find, got cerr {:s}".format(cerr))
return None
- stat_out, cerr, retval = self.hpc.syscall(
- "stat", ["-c", "%Y %n"] + files.splitlines()
- )
+ stat_out, cerr, retval = self.hpc.syscall("stat", ["-c", "%Y %n"] + files.splitlines())
if retval != 0:
self.logger.error("Tried to call stat, got cerr {:s}".format(cerr))
return None
except Exception as ex:
- self.logger.debug(
- "Could not stat files on HPC machine: {ex}".format(ex=ex.args)
- )
+ self.logger.debug("Could not stat files on HPC machine: {ex}".format(ex=ex.args))
return None
- # Process dates to compute age of all files
+ #Process dates to compute age of all files
hpc_date = datetime.datetime.fromtimestamp(int(hpc_date))
self.logger.debug("HPC date: '{}'".format(str(hpc_date)))
file_age = {}
for line in stat_out.splitlines():
- date, filename = line.split(" ")
+ date, filename = line.split(' ')
file_age[filename] = hpc_date - datetime.datetime.fromtimestamp(int(date))
- self.logger.debug(
- "Age of '{:s}' is {:s}".format(filename, str(file_age[filename]))
- )
+ self.logger.debug("Age of '{:s}' is {:s}".format(filename, str(file_age[filename])))
return file_age
+
+
+
def run_and_wait(self):
- """Start the model and wait for it to finish
+ '''Start the model and wait for it to finish
Returns QJobStatus code
- """
- self.logger.debug(
- "starting run on hpc {}: {}".format(self.hpcMachine, self.hpc_outdir)
- )
+ '''
+ self.logger.debug("starting run on hpc {}: {}".format(self.hpcMachine, self.hpc_outdir))
remote_jobscript = os.path.join(self.hpc_outdir, self.jobscript)
qjob = self.hpc.submit_job(remote_jobscript, [])
- qjob.status_file = StatusFile(
- os.path.join(self.hpc_outdir, self.statusfile), "finished"
- )
+ qjob.status_file = StatusFile(os.path.join(self.hpc_outdir, self.statusfile), "finished")
# wait for 60 minutes to finish, check every minute
- sleep_time = 60 # seconds
- count = 60 # * sleep_time
+ sleep_time = 60 # seconds
+ count = 60 # * sleep_time
status = self.hpc.get_status(qjob)
while not (status == QJobStatus.finished or status == QJobStatus.failed):
sleep(sleep_time)
@@ -422,149 +374,99 @@ def run_and_wait(self):
self.hpc.delete_job(qjob)
break
status = self.hpc.get_status(qjob)
- self.logger.debug(
- "jobstatus on hpc {} jobid={}: {}".format(
- self.hpcMachine, qjob.jobid, status
- )
- )
+ self.logger.debug("jobstatus on hpc {} jobid={}: {}".format(self.hpcMachine, qjob.jobid, status))
return status
def download_results(self):
- """download the result-files, and rename them as appropriate"""
+ '''download the result-files, and rename them as appropriate'''
start_time = self.volcano.get_meteo_dates()[1]
tomorrow = (start_time + datetime.timedelta(days=1)).strftime("%Y%m%d")
- # Get age of files on HPC
+ #Get age of files on HPC
file_age = self.get_run_file_ages()
if file_age is None:
- self.logger.error(
- f"Could not get run-file ages on {self.hpcMachine} - something is wrong!"
- )
+ self.logger.error(f"Could not get run-file ages on {self.hpcMachine} - something is wrong!")
return
- # Download output files
- for filename in [
- ModelRunner.OUTPUT_AVERAGE_FILENAME,
- ModelRunner.OUTPUT_INSTANT_FILENAME,
- ]:
+ #Download output files
+ for filename in [ModelRunner.OUTPUT_AVERAGE_FILENAME, ModelRunner.OUTPUT_INSTANT_FILENAME]:
filename_local = os.path.join(self.path, filename)
filename = os.path.join(self.hpc_outdir, filename)
if filename in file_age:
age = file_age[filename] / datetime.timedelta(minutes=1)
else:
age = 999
- if age > 120:
- self.logger.error(
- "File {} too old on {}".format(filename, self.hpcMachine)
- )
+ if (age > 120):
+ self.logger.error("File {} too old on {}".format(filename, self.hpcMachine))
return
- self.logger.debug(
- "downloading {}:{} to {}".format(filename, self.hpcMachine, self.path)
- )
+ self.logger.debug("downloading {}:{} to {}".format(filename, self.hpcMachine, self.path))
self.hpc.get_files([filename], self.path, 1200)
- # Check sanity of output results
+ #Check sanity of output results
try:
with Dataset(filename_local) as nc_file:
- time_var = nc_file["time"]
- times = num2date(time_var[:], units=time_var.units).astype(
- "datetime64[ns]"
- )
+ time_var = nc_file['time']
+ times = num2date(time_var[:], units = time_var.units).astype('datetime64[ns]')
except Exception as e:
- self.logger.error(
- "Unable to open NetCDF file {:s}: {:s}".format(
- filename_local, str(e)
- )
- )
+ self.logger.error("Unable to open NetCDF file {:s}: {:s}".format(filename_local, str(e)))
return
- self.logger.debug(
- "File {:s} contains the following timesteps: {:s}..{:s}".format(
- filename_local, str(times[0]), str(times[-1])
- )
- )
- if len(times) < self.volcano.runTimeHours:
- self.logger.warning(
- "WARNING: File {:s} appears not to have the correct timesteps!".format(
- filename_local
- )
- )
-
- # Download initial conditions for continued run
- file = "EMEP_OUT_{}.nc".format(tomorrow)
+ self.logger.debug("File {:s} contains the following timesteps: {:s}..{:s}".format(filename_local, str(times[0]), str(times[-1])))
+ if (len(times) < self.volcano.runTimeHours):
+ self.logger.warning("WARNING: File {:s} appears not to have the correct timesteps!".format(filename_local))
+
+ #Download initial conditions for continued run
+ file = 'EMEP_OUT_{}.nc'.format(tomorrow)
age = file_age.pop(os.path.join(self.hpc_outdir, file), None)
- if age is None:
- self.logger.error(
- "File {} does not exist on {}".format(file, self.hpcMachine)
- )
+ if (age is None):
+ self.logger.error("File {} does not exist on {}".format(file, self.hpcMachine))
return
- if age / datetime.timedelta(minutes=1) > 120:
+ if (age/datetime.timedelta(minutes=1) > 120):
self.logger.error("File {} too old on {}".format(file, self.hpcMachine))
return
- self.logger.debug(
- "downloading {}:{} to {}".format(file, self.hpcMachine, self.path)
- )
- try:
+ self.logger.debug("downloading {}:{} to {}".format(file, self.hpcMachine, self.path))
+ try :
self.hpc.get_files([os.path.join(self.hpc_outdir, file)], self.path, 1200)
except Exception as ex:
# not dangerous if it fail, but remove file
- self.logger.debug(
- "couldn't download '{}', ignoring: {}".format(file, ex.args)
- )
+ self.logger.debug("couldn't download '{}', ignoring: {}".format(file, ex.args))
filename = os.path.join(self.path, file)
- if os.path.lexists(filename):
- os.unlink(filename)
+ if os.path.lexists(filename): os.unlink(filename)
else:
- os.rename(
- os.path.join(self.path, file),
- os.path.join(self.path, "EMEP_IN_{}.nc".format(tomorrow)),
- )
+ os.rename(os.path.join(self.path, file),
+ os.path.join(self.path, 'EMEP_IN_{}.nc'.format(tomorrow)))
- # Postprocess
+ #Postprocess
pp = PostProcess(self.path, self.timestamp, logger=self)
if self.npp:
- pp.accumulate_and_toa_nuc_files(
- os.path.join(self.path, ModelRunner.OUTPUT_INSTANT_FILENAME),
- os.path.join(self.path, ModelRunner.OUTPUT_AVERAGE_FILENAME),
- )
+ pp.accumulate_and_toa_nuc_files(os.path.join(self.path, ModelRunner.OUTPUT_INSTANT_FILENAME),
+ os.path.join(self.path, ModelRunner.OUTPUT_AVERAGE_FILENAME))
else:
- pp.convert_files(
- os.path.join(self.path, ModelRunner.OUTPUT_INSTANT_FILENAME),
- os.path.join(self.path, ModelRunner.OUTPUT_AVERAGE_FILENAME),
- )
+ pp.convert_files(os.path.join(self.path, ModelRunner.OUTPUT_INSTANT_FILENAME),
+ os.path.join(self.path, ModelRunner.OUTPUT_AVERAGE_FILENAME))
# cleanup softlinks in output-dir
- findArgs = [self.hpc_outdir, "-type", "l", "-delete"]
+ findArgs = [self.hpc_outdir, '-type', 'l', '-delete']
try:
- self.hpc.syscall("find", findArgs, timeout=30)
+ self.hpc.syscall('find', findArgs, timeout=30)
except Exception as ex:
- self.logger.warning(
- "cannot excecute command 'find {args}': {ex}".format(
- args=" ".join(findArgs), ex=ex.args
- )
- )
+            self.logger.warning("cannot execute command 'find {args}': {ex}".format(args=" ".join(findArgs),
+                                                                                    ex=ex.args))
def work(self):
- """do the complete work, e.g. upload, run, wait and download"""
+ '''do the complete work, e.g. upload, run, wait and download'''
self.clean_old_files()
self.do_upload_files()
status = self.run_and_wait()
- if status == QJobStatus.failed:
+ if (status == QJobStatus.failed):
self.logger.error("HPC-job failed: Not downloading any results.")
- elif status == QJobStatus.queued:
- self.logger.error(
- "HPC-resource not available on {}, giving up.".format(self.hpcMachine)
- )
- elif status == QJobStatus.running:
- self.logger.error(
- "HPC-job on {} not finished in time, downloading partial".format(
- self.hpcMachine
- )
- )
+ elif (status == QJobStatus.queued):
+ self.logger.error("HPC-resource not available on {}, giving up.".format(self.hpcMachine))
+ elif (status == QJobStatus.running):
+ self.logger.error("HPC-job on {} not finished in time, downloading partial".format(self.hpcMachine))
else:
self.download_results()
-
class TestModelRunner(unittest.TestCase):
hpcMachine = "ppi_centos7_direct"
doRun = True
@@ -573,12 +475,12 @@ def setUp(self):
self.logger = logging.getLogger("TestModelRunner")
unittest.TestCase.setUp(self)
- self.indir = os.path.join(os.path.dirname(__file__), "test")
+ self.indir = os.path.join(os.path.dirname(__file__),"test")
self.logger.debug("Input dir: {:s}".format(self.indir))
volcanoFile = os.path.join(self.indir, "volcano.xml")
self.logger.debug("Input volcano file: {:s}".format(volcanoFile))
- VolcanoRun(volcanoFile)
+ volc = VolcanoRun(volcanoFile)
self.dir = tempfile.TemporaryDirectory(prefix="volcano_download_")
self.logger.debug("Download directory: {:s}".format(self.dir.name))
@@ -587,150 +489,108 @@ def setUp(self):
with open(os.path.join(self.dir.name, "volcano.xml"), "wt") as oh:
with open(volcanoFile, "rt") as ih:
for line in ih:
- line = re.sub("2016-11-03", yesterday.strftime("%Y-%m-%d"), line)
+ line = re.sub('2016-11-03', yesterday.strftime('%Y-%m-%d'), line)
oh.write(line)
- self.files = (
- "columnsource_location.csv",
- "columnsource_emission.csv",
- "eemep_script.job",
- )
+ self.files = ('columnsource_location.csv', 'columnsource_emission.csv', 'eemep_script.job')
self.output_files = ("eemep_hourInst.nc", "eemep_hour.nc")
+
def testModelRunner(self):
- # Create model runner
+ #Create model runner
mr = ModelRunner(self.dir.name, TestModelRunner.hpcMachine)
- self.logger.debug(
- "Modelrunner setup complete, local outdir is {:s}".format(mr.hpc_outdir)
- )
- self.logger.debug(
- "Modelrunner setup complete, HPC outdir is {:s}".format(mr.hpc_outdir)
- )
-
- # Test uploading of files
+ self.logger.debug("Modelrunner setup complete, local outdir is {:s}".format(mr.hpc_outdir))
+ self.logger.debug("Modelrunner setup complete, HPC outdir is {:s}".format(mr.hpc_outdir))
+
+ #Test uploading of files
mr.do_upload_files()
self.logger.debug("Files uploaded")
file_ages = mr.get_run_file_ages()
self.assertTrue(file_ages is not None, "Could not get file ages!")
- # Check that we find meteo files
+ #Check that we find meteo files
meteo_count = 0
for x in file_ages.keys():
self.logger.debug("Found file {:s}".format(x))
- if re.search(r"meteo\d{8}.nc", x):
+ if re.search(r'meteo\d{8}.nc', x):
self.logger.debug("Found meteo file {:s}".format(x))
meteo_count += 1
- # FIXME: This changes. 09:38 it gives 3 files. +36 hours related, i.e., should give 4 when after 12, and 3 between 00 and 12?
+ #FIXME: This changes. 09:38 it gives 3 files. +36 hours related, i.e., should give 4 when after 12, and 3 between 00 and 12?
self.assertTrue(meteo_count >= 3, msg="Meteo files not created!")
- # Check that we find the expected config files
+ #Check that we find the expected config files
for filename in self.files:
filename = os.path.join(mr.hpc_outdir, filename)
- self.assertTrue(
- filename in file_ages.keys(), "Could not find {:s}".format(filename)
- )
- self.logger.debug(
- "Input file '{:s}' is {:s} old".format(
- filename, str(file_ages[filename])
- )
- )
+ self.assertTrue(filename in file_ages.keys(), "Could not find {:s}".format(filename))
+ self.logger.debug("Input file '{:s}' is {:s} old".format(filename, str(file_ages[filename])))
self.assertTrue(file_ages[filename] / datetime.timedelta(minutes=1) < 15)
- if self.doRun is False:
+ if (self.doRun == False):
self.logger.debug("Skipping remainder of test - not doRun is false")
else:
- # Test running.
+ #Test running.
status = mr.run_and_wait()
- self.assertTrue(
- status == QJobStatus.finished,
- "Run and wait returned unexpected status {:s}".format(str(status)),
- )
+ self.assertTrue(status == QJobStatus.finished, "Run and wait returned unexpected status {:s}".format(str(status)))
file_ages = mr.get_run_file_ages()
self.assertTrue(len(file_ages.keys()) > len(self.files))
- for filename in [
- ModelRunner.OUTPUT_INSTANT_FILENAME,
- ModelRunner.OUTPUT_AVERAGE_FILENAME,
- ]:
+ for filename in [ModelRunner.OUTPUT_INSTANT_FILENAME, ModelRunner.OUTPUT_AVERAGE_FILENAME]:
filename = os.path.join(mr.hpc_outdir, filename)
self.assertTrue(filename in file_ages.keys(), " ")
- self.logger.debug(
- "Output file '{:s}' is {:s} minutes old".format(
- filename, str(file_ages[filename])
- )
- )
- self.assertTrue(
- file_ages[filename] / datetime.timedelta(minutes=1) < 15
- )
-
- # Test downloading / postprocessing
+ self.logger.debug("Output file '{:s}' is {:s} minutes old".format(filename, str(file_ages[filename])))
+ self.assertTrue(file_ages[filename] / datetime.timedelta(minutes=1) < 15)
+
+ #Test downloading / postprocessing
mr.download_results()
- for pattern in ["eemep_hourInst_*.nc", "eemep_hour_*.nc", "EMEP_IN_*.nc"]:
+ for pattern in ['eemep_hourInst_*.nc', 'eemep_hour_*.nc', 'EMEP_IN_*.nc']:
self.logger.debug("Checking for pattern '{:s}'".format(pattern))
files = glob.glob(os.path.join(mr.path, pattern))
timestamp_ok = False
for f in files:
- age = datetime.datetime.now() - datetime.datetime.fromtimestamp(
- os.path.getmtime(f)
- )
+ age = (datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(f)))
self.logger.debug("Found {:s} with age {:s}".format(f, str(age)))
- if age / datetime.timedelta(minutes=1) < 120:
+ if (age / datetime.timedelta(minutes=1) < 120):
self.logger.debug("Age OK, skipping remaining files")
timestamp_ok = True
- self.assertTrue(
- timestamp_ok,
- msg="Could not find file matching {:s}".format(pattern),
- )
+ self.assertTrue(timestamp_ok, msg="Could not find file matching {:s}".format(pattern))
- # Test cleanup
+ #Test cleanup
mr.clean_old_files()
file_ages = mr.get_run_file_ages()
self.assertTrue(file_ages is not None, msg="")
- self.assertTrue(
- len(file_ages.keys()) >= len(self.files),
- msg="Too few files in output directory!",
- )
- for filename in [
- ModelRunner.OUTPUT_INSTANT_FILENAME,
- ModelRunner.OUTPUT_AVERAGE_FILENAME,
- mr.statusfile,
- ]:
+ self.assertTrue(len(file_ages.keys()) >= len(self.files), msg="Too few files in output directory!")
+ for filename in [ModelRunner.OUTPUT_INSTANT_FILENAME, ModelRunner.OUTPUT_AVERAGE_FILENAME, mr.statusfile]:
filename = os.path.join(mr.hpc_outdir, filename)
self.assertFalse(filename in file_ages.keys(), " ")
- @unittest.skipIf(doRun is False, "Do run is false")
+ @unittest.skipIf(doRun==False, "Do run is false")
def testWork(self):
- # Create model runner and test
+ #Create model runner and test
mr = ModelRunner(self.dir.name, TestModelRunner.hpcMachine)
mr.work()
- for pattern in ["eemep_hourInst_*.nc", "eemep_hour_*.nc", "EMEP_IN_*.nc"]:
+ for pattern in ['eemep_hourInst_*.nc', 'eemep_hour_*.nc', 'EMEP_IN_*.nc']:
self.logger.debug("Checking for pattern '{:s}'".format(pattern))
files = glob.glob(os.path.join(mr.path, pattern))
timestamp_ok = False
for f in files:
- age = datetime.datetime.now() - datetime.datetime.fromtimestamp(
- os.path.getmtime(f)
- )
+ age = (datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(f)))
self.logger.debug("Found {:s} with age {:s}".format(f, str(age)))
- if age / datetime.timedelta(minutes=1) < 120:
+ if (age / datetime.timedelta(minutes=1) < 120):
self.logger.debug("Age OK, skipping remaining files")
timestamp_ok = True
- self.assertTrue(
- timestamp_ok, msg="Could not find file matching {:s}".format(pattern)
- )
+ self.assertTrue(timestamp_ok, msg="Could not find file matching {:s}".format(pattern))
+
if __name__ == "__main__":
- logging.basicConfig(
- format="%(asctime)s: %(message)s", datefmt="%Y%m%dT%H%M%SZ", stream=sys.stderr
- )
+ logging.basicConfig(format='%(asctime)s: %(message)s', datefmt="%Y%m%dT%H%M%SZ", stream=sys.stderr)
logging.root.setLevel(logging.NOTSET)
- # logging.getLogger("TestModelRunner").setLevel(logging.DEBUG)
- # logging.getLogger("ModelRunner").
+ #logging.getLogger("TestModelRunner").setLevel(logging.DEBUG)
+ #logging.getLogger("ModelRunner").
- # Do not sort tests
+ #Do not sort tests
- logging.warning("This test takes 1-2 hours to complete")
+    logging.warning("This test takes 1-2 hours to complete")
unittest.TestLoader.sortTestMethodsUsing = None
unittest.main(verbosity=2, failfast=True)
diff --git a/utils/SnapPy/Snappy/EEMEP/NppRun.py b/utils/SnapPy/Snappy/EEMEP/NppRun.py
index 9d8a5a05..f7a75c7a 100644
--- a/utils/SnapPy/Snappy/EEMEP/NppRun.py
+++ b/utils/SnapPy/Snappy/EEMEP/NppRun.py
@@ -1,53 +1,52 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 24, 2016
@author: heikok
-"""
+'''
import datetime
import re
+import unittest
import xml.etree.ElementTree as ET
-class NppRun:
- """
+class NppRun():
+ '''
npp-run definition handling, e.g. reading from a xml-file
- """
-
+ '''
root = None
xmlFile = None
outputDir = None
- runTimeHours = 0.0
+ runTimeHours = 0.
def __init__(self, xmlFile):
- """
+ '''
Open a npp by reading a npp.xml file. Might raise several Exceptions.
- """
+ '''
self.xmlFile = xmlFile
tree = ET.parse(xmlFile)
self.root = tree.getroot()
- assert (
- self.root.tag == "npp_emission_run"
- ), "not a npp_emission_run file: {}".format(xmlFile)
- self.outputDir = self.root.attrib["output_directory"]
- self.runTimeHours = float(self.root.attrib["run_time_hours"])
+ assert self.root.tag == 'npp_emission_run', \
+ "not a npp_emission_run file: {}".format(xmlFile)
+ self.outputDir = self.root.attrib['output_directory']
+ self.runTimeHours = float(self.root.attrib['run_time_hours'])
# raise eventual xml-parsing errors already in init
self.get_columnsource_emission()
self.get_columnsource_location()
@@ -55,85 +54,79 @@ def __init__(self, xmlFile):
self.run_as_restart()
def get_meteo_dates(self):
- """Returns (reference_date, model_start_time) of the meteorology,
- where reference_date is a string (best) and model_start_time a datetime object"""
+ '''Returns (reference_date, model_start_time) of the meteorology,
+ where reference_date is a string (best) and model_start_time a datetime object'''
weather = self.root.find("model_setup/weather_forecast")
- return (
- weather.attrib["reference_date"],
- datetime.datetime.strptime(
- weather.attrib["model_start_time"], "%Y-%m-%dT%H:%M:%SZ"
- ),
- )
+ return (weather.attrib["reference_date"], \
+ datetime.datetime.strptime(weather.attrib["model_start_time"],"%Y-%m-%dT%H:%M:%SZ"))
+
def get_columnsource_location(self):
- """get a string used within a eemep columnsource_location.csv file, e.g.
+ '''get a string used within a eemep columnsource_location.csv file, e.g.
#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE
V1702A02B,Bardarbunga,Iceland-NE,64.63,N,17.53,W,2009,Stratovolcano,M0
- """
+ '''
volc = self.root.find("npp")
defs = {}
- defs["lat"] = float(volc.attrib["lat"])
- defs["north"] = "N"
- if defs["lat"] < 0:
- defs["north"] = "S"
+ defs["lat"] = float(volc.attrib['lat'])
+ defs["north"] = 'N'
+ if (defs["lat"] < 0):
+ defs["north"] = 'S'
defs["lat"] = defs["lat"] * -1
- defs["lon"] = float(volc.attrib["lon"])
- defs["east"] = "E"
- if defs["lon"] < 0:
- defs["east"] = "W"
+ defs["lon"] = float(volc.attrib['lon'])
+ defs["east"] = 'E'
+ if (defs["lon"] < 0):
+ defs["east"] = 'W'
defs["lon"] = defs["lon"] * -1
- name = volc.attrib["name"]
- defs["name"] = re.sub(r"[\W,]", "", name)
- defs["altitude"] = int(volc.attrib["altitude"])
-
- desc = (
- "#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE\n"
- )
- definition = (
- "NUC,{name},XXX,{lat},{north},{lon},{east},{altitude},xxx,NUC\n".format(
- **defs
- )
- )
-
- self.latitude = defs["lat"]
- self.longitude = defs["lon"]
+ name = volc.attrib['name']
+ defs["name"] = re.sub(r'[\W,]', '', name)
+ defs["altitude"] = int(volc.attrib['altitude'])
+
+ desc = "#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE\n"
+ definition = "NUC,{name},XXX,{lat},{north},{lon},{east},{altitude},xxx,NUC\n".format(**defs)
+ self.latitude = defs['lat']
+ self.longitude = defs ['lon']
+
return desc + definition
+
def get_columnsource_emission(self):
- """get a string used within a eemep columnsource_emission.csv file, e.g.
+ '''get a string used within a eemep columnsource_emission.csv file, e.g.
#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION
NUC,,VENT, 10.000, 96, 1987149.0, 0.05,2016-10-11 05:00:00,SE+D, no description
- """
+ '''
desc = "#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION\n"
definition = "NUC,NPP_{component},{bottom}, {top}, {duration}, {rate}, 1.0,{startdate},{enddate}, no description\n"
out = [desc]
- for emis in self.root.findall("emissions/emission"):
+ for emis in self.root.findall('emissions/emission'):
defs = {}
- defs["top"] = float(emis.attrib["top"]) / 1000.0
- defs["bottom"] = float(emis.attrib["bottom"]) / 1000.0
-
- defs["rate"] = int(float(emis.attrib["rate"]))
- start = datetime.datetime.strptime(
- emis.attrib["start"], "%Y-%m-%dT%H:%M:%SZ"
- )
- end = datetime.datetime.strptime(emis.attrib["end"], "%Y-%m-%dT%H:%M:%SZ")
+ defs["top"] = float(emis.attrib['top'])/1000.
+ defs["bottom"] = float(emis.attrib['bottom'])/1000.
+
+ defs["rate"] = int(float(emis.attrib['rate']))
+ start = datetime.datetime.strptime(emis.attrib['start'], '%Y-%m-%dT%H:%M:%SZ')
+ end = datetime.datetime.strptime(emis.attrib['end'], '%Y-%m-%dT%H:%M:%SZ')
defs["component"] = emis.attrib["component"]
- defs["duration"] = (end - start).total_seconds() / (60.0 * 60.0)
- defs["startdate"] = start.strftime("%Y-%m-%d %H:%M:%S")
- defs["enddate"] = end.strftime("%Y-%m-%d %H:%M:%S")
+ defs["duration"] = (end-start).total_seconds() / (60.*60.)
+ defs["startdate"] = start.strftime('%Y-%m-%d %H:%M:%S')
+ defs["enddate"] = end.strftime('%Y-%m-%d %H:%M:%S')
out.append(definition.format(**defs))
assert len(out) > 1, "no emission found"
- return "".join(out)
+ return ''.join(out)
def run_as_restart(self):
- model_run = self.root.find("model_setup[@use_restart_file]")
- if model_run.attrib["use_restart_file"] == "restart":
+ model_run = self.root.find('model_setup[@use_restart_file]')
+ if (model_run.attrib["use_restart_file"] == "restart"):
return True
return False
+
+
+
+
diff --git a/utils/SnapPy/Snappy/EEMEP/PostProcess.py b/utils/SnapPy/Snappy/EEMEP/PostProcess.py
index ef545bb9..b0dd3125 100644
--- a/utils/SnapPy/Snappy/EEMEP/PostProcess.py
+++ b/utils/SnapPy/Snappy/EEMEP/PostProcess.py
@@ -6,77 +6,76 @@
import math
import netCDF4
+import numpy as np
from Snappy.EEMEP.SixHourMax import SixHourMax
from Snappy.Isotopes import Isotopes
from Snappy.AddToa import add_toa_to_nc
-class StderrLogger:
+class StderrLogger():
def _write_log(self, msg):
print(msg, file=sys.stderr)
-
def _get_isotope_setup(isotope, type):
- """
- retrieve an object describing the isotope of the type=WDEP,DDEP,CONC giving
- need_decay # True: decay needed between accumulation steps
- acc_only # True: only accumulated file in output
- decayrate # decay-rate factor
- eemep_name # name in eemep
- snap_name # name in snap without accumulation
- snap_acc_name # name in snap with accumulation
- units # units without accumulation
- units_acc # units with accumulation
- """
- retval = {
- "need_decay": False,
- "acc_only": True,
- "decayrate": 0,
- "eemep_name": None,
- "snap_name": None,
- "units_acc": None,
- "units": None,
- }
- if type == "CONC":
- retval["eemep_name"] = f"SURF_uBq_NPP_{isotope}"
- retval["snap_acc_name"] = f"{isotope}_acc_concentration"
- retval["acc_only"] = True
- retval["decay_needed"] = False
- retval["units_acc"] = "uBq*hr/m3"
- elif type == "WDEP":
- retval["eemep_name"] = f"WDEP_NPP_{isotope}"
- retval["snap_acc_name"] = f"{isotope}_acc_wet_deposition"
- retval["snap_name"] = f"{isotope}_wet_deposition"
- retval["acc_only"] = False
- retval["decay_needed"] = True
- retval["decayrate"] = 1 # TBD
- retval["units"] = "mBq/m2"
- retval["units_acc"] = "mBq/m2" # no *hr, since this is what is on ground
- elif type == "DDEP":
- retval["eemep_name"] = f"DDEP_NPP_{isotope}_m2Grid"
- retval["snap_acc_name"] = f"{isotope}_acc_dry_deposition"
- retval["snap_name"] = f"{isotope}_dry_deposition"
- retval["acc_only"] = False
- retval["decay_needed"] = True
- retval["decayrate"] = 1 # TBD
- retval["units"] = "mBq/m2"
- retval["units_acc"] = "mBq/m2"
+ '''
+ retrieve an object describing the isotope of the type=WDEP,DDEP,CONC giving
+ need_decay # True: decay needed between accumulation steps
+ acc_only # True: only accumulated file in output
+ decayrate # decay-rate factor
+ eemep_name # name in eemep
+ snap_name # name in snap without accumulation
+ snap_acc_name # name in snap with accumulation
+ units # units without accumulation
+ units_acc # units with accumulation
+ '''
+ retval = {'need_decay': False,
+ 'acc_only': True,
+ 'decayrate': 0,
+ 'eemep_name': None,
+ 'snap_name': None,
+ 'units_acc': None,
+ 'units': None}
+ if type == 'CONC':
+ retval['eemep_name'] = f'SURF_uBq_NPP_{isotope}'
+ retval['snap_acc_name'] = f'{isotope}_acc_concentration'
+ retval['acc_only'] = True
+ retval['decay_needed'] = False
+ retval['units_acc'] = 'uBq*hr/m3'
+ elif type == 'WDEP':
+ retval['eemep_name'] = f'WDEP_NPP_{isotope}'
+ retval['snap_acc_name'] = f'{isotope}_acc_wet_deposition'
+ retval['snap_name'] = f'{isotope}_wet_deposition'
+ retval['acc_only'] = False
+ retval['decay_needed'] = True
+ retval['decayrate'] = 1 # TBD
+ retval['units'] = 'mBq/m2'
+ retval['units_acc'] = 'mBq/m2' # no *hr, since this is what is on ground
+ elif type == 'DDEP':
+ retval['eemep_name'] = f'DDEP_NPP_{isotope}_m2Grid'
+ retval['snap_acc_name'] = f'{isotope}_acc_dry_deposition'
+ retval['snap_name'] = f'{isotope}_dry_deposition'
+ retval['acc_only'] = False
+ retval['decay_needed'] = True
+ retval['decayrate'] = 1 # TBD
+ retval['units'] = 'mBq/m2'
+ retval['units_acc'] = 'mBq/m2'
else:
- raise Exception(f"wrong type: {type}")
+ raise Exception(f'wrong type: {type}')
return retval
-class PostProcess:
- """Run the postprocessing, usage:
- pp = PostProcess('.', datetime.now())
- pp.convert_files('eemep_hourInst.nc', 'eemep_hour.nc')"""
+class PostProcess():
+ ''' Run the postprocessing, usage:
+pp = PostProcess('.', datetime.now())
+pp.convert_files('eemep_hourInst.nc', 'eemep_hour.nc')
+'''
def __init__(self, path, timestamp, logger=None):
- """Initialize the Postprocess, but run nothing.
+ '''Initialize the Postprocess, but run nothing.
logger is a class with a _write_log(msg) method.
- """
+ '''
self.timestamp = timestamp
self.path = path
@@ -86,31 +85,30 @@ def __init__(self, path, timestamp, logger=None):
self.logger = StderrLogger()
def convert_files(self, instantFilename, averageFilename):
- """Run the postprocessing on the instant- and averageFilename files"""
+ '''Run the postprocessing on the instant- and averageFilename files'''
# rename files, make them available to further processes
timestamp = self.timestamp.strftime("%Y%m%dT%H%M%S")
simulationstart = self.timestamp.strftime("%Y-%m-%d_%H:%M:%S")
self.logger._write_log("postprocessing {}".format(instantFilename))
- with netCDF4.Dataset(os.path.join(self.path, instantFilename), "a") as nc:
- nc.setncattr("SIMULATION_START_DATE", simulationstart)
-
- self.logger._write_log(
- "postprocessing (adding 6h_vmax) {}".format(averageFilename)
- )
- with netCDF4.Dataset(os.path.join(self.path, averageFilename), "a") as nc:
- nc.setncattr("SIMULATION_START_DATE", simulationstart)
- nc["time"][:] += 0.5 / 24.0 # add half an hour as 'days since'
+ with netCDF4.Dataset(os.path.join(self.path, instantFilename), 'a') as nc:
+ nc.setncattr('SIMULATION_START_DATE', simulationstart)
+
+ self.logger._write_log("postprocessing (adding 6h_vmax) {}".format(averageFilename))
+ with netCDF4.Dataset(os.path.join(self.path, averageFilename), 'a') as nc:
+ nc.setncattr('SIMULATION_START_DATE', simulationstart)
+ nc['time'][:] += (0.5 / 24.) # add half an hour as 'days since'
SixHourMax(nc)
- newInstFile = os.path.join(self.path, "eemep_hourInst_{}.nc".format(timestamp))
- newAvgFile = os.path.join(self.path, "eemep_hour_{}.nc".format(timestamp))
- self.logger._write_log(
- "making files available as {} and {}".format(newInstFile, newAvgFile)
- )
+ newInstFile = os.path.join(self.path, 'eemep_hourInst_{}.nc'.format(timestamp))
+ newAvgFile = os.path.join(self.path, 'eemep_hour_{}.nc'.format(timestamp))
+ self.logger._write_log("making files available as {} and {}".format(newInstFile, newAvgFile))
os.rename(instantFilename, newInstFile)
os.rename(averageFilename, newAvgFile)
+
+
+
def accumulate_and_toa_nuc_files(self, instantFilename, averageFilename) -> None:
self.logger._write_log(f"Accumulating nuclear outputs found in {self.path}")
# rename files, make them available to further processes
@@ -119,95 +117,78 @@ def accumulate_and_toa_nuc_files(self, instantFilename, averageFilename) -> None
dir = Path(self.path)
- new_filename = dir / "eemep_nuc_{}.nc".format(timestamp)
+ new_filename = dir / 'eemep_nuc_{}.nc'.format(timestamp)
nc_path = dir / instantFilename
shutil.copy(nc_path, new_filename)
isotopes = set()
- with netCDF4.Dataset(new_filename, "a") as nc:
- nc.setncattr("SIMULATION_START_DATE", simulationstart)
+ with netCDF4.Dataset(new_filename, 'a') as nc:
+ nc.setncattr('SIMULATION_START_DATE', simulationstart)
for varname, var in nc.variables.items():
- if varname.startswith("SURF_uBq_NPP_"):
+ if varname.startswith('SURF_uBq_NPP_'):
isotopes.add(var.name[13:])
# rename variables in copied instantFile
for isotope in isotopes:
- varname = f"SURF_uBq_NPP_{isotope}"
- newvar = f"{isotope}_concentration"
+ varname = f'SURF_uBq_NPP_{isotope}'
+ newvar = f'{isotope}_concentration'
if varname in nc.variables:
self.logger._write_log(f"renaming {varname} to {newvar}")
# nc.renameVariabe(varname, newvar) # bug in renameVariable -> just create a copy for now
var = nc[varname]
- nvar = nc.createVariable(
- newvar,
- var.datatype,
- dimensions=var.dimensions,
- zlib=True,
- complevel=1,
- )
+ nvar = nc.createVariable(newvar, var.datatype, dimensions=var.dimensions,
+ zlib=True, complevel=1)
nvar.setncatts(var.__dict__)
- for t in range(0, nc["time"].shape[0]):
- nvar[t, :] = var[t, :]
-
+ for t in range(0, nc['time'].shape[0]):
+ nvar[t,:] = var[t,:]
+
+
# copy data from averageFile
- with netCDF4.Dataset(dir / averageFilename, "r") as ncAvg:
+ with netCDF4.Dataset(dir / averageFilename, 'r') as ncAvg:
for isotope in isotopes:
- for type in ("CONC", "WDEP", "DDEP"):
+ for type in ('CONC', 'WDEP', 'DDEP'):
isosetup = _get_isotope_setup(isotope, type)
- self.logger._write_log(
- f"fix output for {isotope}, {type}, {isosetup}"
- )
- varname = isosetup["eemep_name"]
- newaccvar = isosetup["snap_acc_name"]
- newvar = isosetup["snap_name"]
+ self.logger._write_log(f"fix output for {isotope}, {type}, {isosetup}")
+ varname = isosetup['eemep_name']
+ newaccvar = isosetup['snap_acc_name']
+ newvar = isosetup['snap_name']
if varname in ncAvg.variables:
var = ncAvg[varname]
- naccvar = nc.createVariable(
- newaccvar,
- var.datatype,
- dimensions=var.dimensions,
- zlib=True,
- complevel=1,
- )
+ naccvar = nc.createVariable(newaccvar, var.datatype, dimensions=var.dimensions,
+ zlib=True, complevel=1)
# copy variable attributes all at once via dictionary
naccvar.setncatts(var.__dict__)
- naccvar.units = isosetup["units_acc"]
- if not isosetup["acc_only"]:
- nvar = nc.createVariable(
- newvar,
- var.datatype,
- dimensions=var.dimensions,
- zlib=True,
- complevel=1,
- )
+ naccvar.units = isosetup['units_acc']
+ if not isosetup['acc_only']:
+ nvar = nc.createVariable(newvar, var.datatype, dimensions=var.dimensions,
+ zlib=True, complevel=1)
# copy variable attributes all at once via dictionary
nvar.setncatts(var.__dict__)
- nvar.units = isosetup["units"]
-
- times = ncAvg["time"][:]
- dates = netCDF4.num2date(times, nc["time"].units)
- data = var[0, :]
- naccvar[0, :] = data
- if not isosetup["acc_only"]:
- nvar[0, :] = data
- for t in range(1, nc["time"].shape[0]):
- data = var[t, :]
- if not isosetup["acc_only"]:
- nvar[t, :] = data
- if isosetup["decay_needed"]:
- decayrate = Isotopes().byName(isotope)["decay"]
- secs = (dates[t] - dates[t - 1]).total_seconds()
- decayfactor = math.exp(-1 * decayrate * secs)
+ nvar.units = isosetup['units']
+
+ times = ncAvg['time'][:]
+ dates = netCDF4.num2date(times, nc['time'].units)
+ data = var[0,:]
+ naccvar[0,:] = data
+ if not isosetup['acc_only']:
+ nvar[0,:] = data
+ for t in range(1, nc['time'].shape[0]):
+ data = var[t,:]
+ if not isosetup['acc_only']:
+ nvar[t,:] = data
+ if isosetup['decay_needed']:
+ decayrate = Isotopes().byName(isotope)['decay']
+ secs = (dates[t] - dates[t-1]).total_seconds()
+ decayfactor = math.exp(-1*decayrate*secs)
else:
decayfactor = 1
- naccvar[t, :] = naccvar[t - 1, :] * decayfactor + data
+ naccvar[t,:] = naccvar[t-1,:]*decayfactor + data
# sync after each variable
nc.sync()
# file is now very like snap-output, so possible to add toa
- self.logger._write_log("adding toa and totals to output")
+ self.logger._write_log('adding toa and totals to output')
add_toa_to_nc(nc)
-
-if __name__ == "__main__":
- pp = PostProcess(".", datetime.datetime.now())
- # pp.convert_files('eemep_hourInst.nc', 'eemep_hour.nc')
- pp.accumulate_and_toa_nuc_files("eemep_hourInst.nc", "eemep_hour.nc")
+if __name__ == '__main__':
+ pp = PostProcess('.', datetime.datetime.now())
+ #pp.convert_files('eemep_hourInst.nc', 'eemep_hour.nc')
+ pp.accumulate_and_toa_nuc_files('eemep_hourInst.nc', 'eemep_hour.nc')
diff --git a/utils/SnapPy/Snappy/EEMEP/Resources.py b/utils/SnapPy/Snappy/EEMEP/Resources.py
index 16d5eef2..df713a49 100644
--- a/utils/SnapPy/Snappy/EEMEP/Resources.py
+++ b/utils/SnapPy/Snappy/EEMEP/Resources.py
@@ -175,7 +175,7 @@ def readVolcanoes(
volcano["LONGITUDE"] *= -1
try:
volcano["ELEV"] = float(volcano["ELEV"])
- except Exception:
+ except:
volcano["ELEV"] = 0.0
if volcano["NAME"] == "Unnamed":
volcano["NAME"] = "_"
diff --git a/utils/SnapPy/Snappy/EEMEP/SixHourMax.py b/utils/SnapPy/Snappy/EEMEP/SixHourMax.py
index fc14b916..f179c44f 100644
--- a/utils/SnapPy/Snappy/EEMEP/SixHourMax.py
+++ b/utils/SnapPy/Snappy/EEMEP/SixHourMax.py
@@ -1,108 +1,102 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2018 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Feb 12, 2018
@author: heikok
-"""
+'''
import math
import numpy as np
from netCDF4 import Dataset
+import datetime
from collections import deque
-
def flightlevel_in_pressure(flightlevel):
- """convert flightlevel (in 100 ft) to pressure in hPa using the international height formula"""
- h = flightlevel * 100 * 0.3048 # feet to meter
- p = 1013.25 * math.pow(1 - (0.0065 * h) / 288.15, 5.255) # international formula
+ '''convert flightlevel (in 100 ft) to pressure in hPa using the international height formula'''
+ h = flightlevel * 100 * 0.3048 # feet to meter
+ p = 1013.25 * math.pow(1-(0.0065*h)/288.15,5.255) # international formula
return p
-
class SixHourMax:
- """Calculate the 6h max VAAC output from 3D eemep-hourly mean output
+ '''Calculate the 6h max VAAC output from 3D eemep-hourly mean output
- i.e. calculate the 6hour mean of the last six hours (running) (average also the surface pressure)
- retrieve the max value within the fat flight layers (FL == atmospheric pressure altitude above 1013.25hPa) (0-200, 200-350, 350-550)
- """
+i.e. calculate the 6hour mean of the last six hours (running) (average also the surface pressure)
+ retrieve the max value within the fat flight layers (FL == atmospheric pressure altitude above 1013.25hPa) (0-200, 200-350, 350-550)
+ '''
FL = (200, 350, 550)
-
+
def __init__(self, nc):
- """Initialize with Dataset nc"""
+ '''Initialize with Dataset nc'''
pFL = [flightlevel_in_pressure(x) for x in SixHourMax.FL]
-
+
# ap and b at layer interface, from troposphere to surface
- v200 = nc.createVariable(
- "MAX6h_ASH_fl000-200", "f4", ("time", "lat", "lon"), zlib=True
- )
- v350 = nc.createVariable(
- "MAX6h_ASH_fl200-350", "f4", ("time", "lat", "lon"), zlib=True
- )
- v550 = nc.createVariable(
- "MAX6h_ASH_fl350-550", "f4", ("time", "lat", "lon"), zlib=True
- )
-
- v200.units = nc["D3_ug_ASH"].units
- v350.units = nc["D3_ug_ASH"].units
- v550.units = nc["D3_ug_ASH"].units
-
- hyai = nc["hyai"][:] # in hPa
- hybi = nc["hybi"][:]
-
- time = nc["time"][:]
- lev = nc["lev"][:]
+ v200 = nc.createVariable('MAX6h_ASH_fl000-200','f4',('time','lat','lon'), zlib=True)
+ v350 = nc.createVariable('MAX6h_ASH_fl200-350','f4',('time','lat','lon'), zlib=True)
+ v550 = nc.createVariable('MAX6h_ASH_fl350-550','f4',('time','lat','lon'), zlib=True)
+
+ v200.units = nc['D3_ug_ASH'].units
+ v350.units = nc['D3_ug_ASH'].units
+ v550.units = nc['D3_ug_ASH'].units
+
+ hyai = nc['hyai'][:] # in hPa
+ hybi = nc['hybi'][:]
+
+ time = nc['time'][:]
+ lev = nc['lev'][:]
ps = deque(maxlen=6)
ash = deque(maxlen=6)
for t in range(len(time)):
- ash.append(nc["D3_ug_ASH"][t, :, :, :])
- ps.append(nc["PS"][t, :, :])
+ ash.append(nc['D3_ug_ASH'][t,:,:,:])
+ ps.append(nc['PS'][t,:,:])
pa = sum(ps) / len(ps)
asha = sum(ash) / len(ash)
-
- max200 = np.zeros(pa.shape, dtype="float")
- max350 = np.zeros(pa.shape, dtype="float")
- max550 = np.zeros(pa.shape, dtype="float")
-
- for il in range(len(lev)):
+
+ max200 = np.zeros(pa.shape, dtype='float')
+ max350 = np.zeros(pa.shape, dtype='float')
+ max550 = np.zeros(pa.shape, dtype='float')
+
+
+ for l in range(len(lev)):
# 6h mean of ash in level l
- ashal = np.squeeze(asha[il, :, :])
- pal = pa * hybi[il] + hyai[il]
+ ashal = np.squeeze(asha[l,:,:])
+ pal = pa * hybi[l] + hyai[l]
tmp = np.where((pFL[0] < pal), ashal, 0)
max200 = np.maximum(tmp, max200)
-
- tmp = np.where((pFL[0] >= pal) & (pFL[1] < pal), ashal, 0)
+
+ tmp = np.where((pFL[0] >= pal) & (pFL[1] < pal), ashal, 0)
max350 = np.maximum(tmp, max350)
-
- tmp = np.where((pFL[1] >= pal) & (pFL[2] < pal), ashal, 0)
+
+ tmp = np.where((pFL[1] >= pal) & (pFL[2] < pal), ashal, 0)
max550 = np.maximum(tmp, max550)
- # print(np.info(max200))
- # print(datetime.datetime.now())
- v200[t, :] = max200 * 10.0
- v350[t, :] = max350 * 10.0
- v550[t, :] = max550 * 10.0
+ #print(np.info(max200))
+ #print(datetime.datetime.now())
+ v200[t,:] = max200 * 10.
+ v350[t,:] = max350 * 10.
+ v550[t,:] = max550 * 10.
nc.sync()
-if __name__ == "__main__":
- print("FL180=", flightlevel_in_pressure(180), "should result in ~ 500hPa")
- # import cProfile
- with Dataset("/disk1/Fimex/eemep_hour.nc", "a") as nc:
+if __name__ == '__main__':
+ print ('FL180=',flightlevel_in_pressure(180),'should result in ~ 500hPa')
+# import cProfile
+ with Dataset('/disk1/Fimex/eemep_hour.nc', 'a') as nc:
SixHourMax(nc)
# cProfile.run("SixHourMax('/lustre/storeB/project/fou/kl/eva/eemep/runs/Jan_Mayen_ondemand/eemep_hour.nc')")
diff --git a/utils/SnapPy/Snappy/EEMEP/VolcanoRun.py b/utils/SnapPy/Snappy/EEMEP/VolcanoRun.py
index 4d8fab8b..05380728 100644
--- a/utils/SnapPy/Snappy/EEMEP/VolcanoRun.py
+++ b/utils/SnapPy/Snappy/EEMEP/VolcanoRun.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Nov 24, 2016
@author: heikok
-"""
+'''
import datetime
import re
import unittest
@@ -27,28 +27,26 @@
import xml.etree.ElementTree as ET
-class VolcanoRun:
- """
+class VolcanoRun():
+ '''
volcano-run definition handling, e.g. reading from a xml-file
- """
-
+ '''
root = None
xmlFile = None
outputDir = None
- runTimeHours = 0.0
+ runTimeHours = 0.
def __init__(self, xmlFile):
- """
+ '''
Open a volcano by reading a volcano.xml file. Might raise several Exceptions.
- """
+ '''
self.xmlFile = xmlFile
tree = ET.parse(xmlFile)
self.root = tree.getroot()
- assert (
- self.root.tag == "volcanic_eruption_run"
- ), "not a volcanic_eruption_run file: {}".format(xmlFile)
- self.outputDir = self.root.attrib["output_directory"]
- self.runTimeHours = float(self.root.attrib["run_time_hours"])
+ assert self.root.tag == 'volcanic_eruption_run', \
+ "not a volcanic_eruption_run file: {}".format(xmlFile)
+ self.outputDir = self.root.attrib['output_directory']
+ self.runTimeHours = float(self.root.attrib['run_time_hours'])
# raise eventual xml-parsing errors already in init
self.get_columnsource_emission()
self.get_columnsource_location()
@@ -56,93 +54,84 @@ def __init__(self, xmlFile):
self.run_as_restart()
def get_meteo_dates(self):
- """Returns (reference_date, model_start_time) of the meteorology,
- where reference_date is a string (best) and model_start_time a datetime object"""
+ '''Returns (reference_date, model_start_time) of the meteorology,
+ where reference_date is a string (best) and model_start_time a datetime object'''
weather = self.root.find("model_setup/weather_forecast")
- return (
- weather.attrib["reference_date"],
- datetime.datetime.strptime(
- weather.attrib["model_start_time"], "%Y-%m-%dT%H:%M:%SZ"
- ),
- )
+ return (weather.attrib["reference_date"], \
+ datetime.datetime.strptime(weather.attrib["model_start_time"],"%Y-%m-%dT%H:%M:%SZ"))
+
def get_columnsource_location(self):
- """get a string used within a eemep columnsource_location.csv file, e.g.
+ '''get a string used within a eemep columnsource_location.csv file, e.g.
#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE
V1702A02B,Bardarbunga,Iceland-NE,64.63,N,17.53,W,2009,Stratovolcano,M0
- """
+ '''
volc = self.root.find("volcano")
defs = {}
- defs["lat"] = float(volc.attrib["lat"])
- defs["north"] = "N"
- if defs["lat"] < 0:
- defs["north"] = "S"
+ defs["lat"] = float(volc.attrib['lat'])
+ defs["north"] = 'N'
+ if (defs["lat"] < 0):
+ defs["north"] = 'S'
defs["lat"] = defs["lat"] * -1
- defs["lon"] = float(volc.attrib["lon"])
- defs["east"] = "E"
- if defs["lon"] < 0:
- defs["east"] = "W"
+ defs["lon"] = float(volc.attrib['lon'])
+ defs["east"] = 'E'
+ if (defs["lon"] < 0):
+ defs["east"] = 'W'
defs["lon"] = defs["lon"] * -1
- name = volc.attrib["name"]
- defs["name"] = re.sub(r"[\W,]", "", name)
- defs["altitude"] = int(volc.attrib["altitude"])
-
- desc = (
- "#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE\n"
- )
- definition = (
- "ASH,{name},XXX,{lat},{north},{lon},{east},{altitude},xxx,M0\n".format(
- **defs
- )
- )
-
- self.latitude = defs["lat"]
- self.longitude = defs["lon"]
+ name = volc.attrib['name']
+ defs["name"] = re.sub(r'[\W,]', '', name)
+ defs["altitude"] = int(volc.attrib['altitude'])
+
+ desc = "#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE\n"
+ definition = "ASH,{name},XXX,{lat},{north},{lon},{east},{altitude},xxx,M0\n".format(**defs)
+ self.latitude = defs['lat']
+ self.longitude = defs ['lon']
+
return desc + definition
+
def get_columnsource_emission(self):
- """get a string used within a eemep columnsource_emission.csv file, e.g.
+ '''get a string used within a eemep columnsource_emission.csv file, e.g.
#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION
M0,,VENT, 10.000, 96, 1987149.0, 0.05,2016-10-11 05:00:00,SE+D, no description
- """
+ '''
desc = "#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION\n"
definition = "M0,,VENT, {height}, {duration}, {rate}, {m63},{startdate},{enddate}, no description\n"
out = [desc]
- for erup in self.root.findall("eruptions/eruption"):
+ for erup in self.root.findall('eruptions/eruption'):
defs = {}
- defs["height"] = float(erup.attrib["top"]) / 1000.0
- defs["rate"] = int(erup.attrib["rate"])
- defs["m63"] = float(erup.attrib["m63"])
- start = datetime.datetime.strptime(
- erup.attrib["start"], "%Y-%m-%dT%H:%M:%SZ"
- )
- end = datetime.datetime.strptime(erup.attrib["end"], "%Y-%m-%dT%H:%M:%SZ")
- defs["duration"] = (end - start).total_seconds() / (60.0 * 60.0)
- defs["startdate"] = start.strftime("%Y-%m-%d %H:%M:%S")
- defs["enddate"] = end.strftime("%Y-%m-%d %H:%M:%S")
+ defs["height"] = float(erup.attrib['top'])/1000.
+ defs["rate"] = int(erup.attrib['rate'])
+ defs["m63"] = float(erup.attrib['m63'])
+ start = datetime.datetime.strptime(erup.attrib['start'], '%Y-%m-%dT%H:%M:%SZ')
+ end = datetime.datetime.strptime(erup.attrib['end'], '%Y-%m-%dT%H:%M:%SZ')
+ defs["duration"] = (end-start).total_seconds() / (60.*60.)
+ defs["startdate"] = start.strftime('%Y-%m-%d %H:%M:%S')
+ defs["enddate"] = end.strftime('%Y-%m-%d %H:%M:%S')
out.append(definition.format(**defs))
assert len(out) > 1, "no eruptions found"
- return "".join(out)
+ return ''.join(out)
def run_as_restart(self):
- model_run = self.root.find("model_setup[@use_restart_file]")
- if model_run.attrib["use_restart_file"] == "restart":
+ model_run = self.root.find('model_setup[@use_restart_file]')
+ if (model_run.attrib["use_restart_file"] == "restart"):
return True
return False
class TestVolcanoRun(unittest.TestCase):
+
def setUp(self):
import os
-
unittest.TestCase.setUp(self)
- self.volcFile = os.path.join(os.path.dirname(__file__), "test", "volcano.xml")
+ self.volcFile = os.path.join(os.path.dirname(__file__),"test", "volcano.xml")
+
def test_init(self):
VolcanoRun(self.volcFile)
@@ -155,21 +144,17 @@ def test_meteo(self):
def test_location(self):
volc = VolcanoRun(self.volcFile)
- expected = """#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE
+ expected = '''#NUMBER,NAME,LOCATION,LATITUDE,NS,LONGITUDE,EW,ELEV,TYPE,ERUPTION TYPE
V1702A02B,Askja,XXX,65.03,N,16.75,W,1516,xxx,M0
-"""
- self.assertEqual(
- volc.get_columnsource_location(), expected, "columnsource_location"
- )
+'''
+ self.assertEqual(volc.get_columnsource_location(), expected, "columnsource_location")
def test_emission(self):
volc = VolcanoRun(self.volcFile)
- expected = """#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION
+ expected = '''#TYPE/NPP,VARIABLE,BASE[km],H[km above vent],D[h],dM/dt[kBq/s],m63[-],START[code/date],END[code/date],DESCRIPTION
M0,,VENT, 7.0, 1.0, 100000, 0.05,2016-11-03 08:00:00,2016-11-03 09:00:00, no description
-"""
- self.assertEqual(
- volc.get_columnsource_emission(), expected, "columnsource_emission"
- )
+'''
+ self.assertEqual(volc.get_columnsource_emission(), expected, "columnsource_emission")
def test_run_restart(self):
volc = VolcanoRun(self.volcFile)
@@ -178,3 +163,6 @@ def test_run_restart(self):
if __name__ == "__main__":
unittest.main()
+
+
+
diff --git a/utils/SnapPy/Snappy/EcMeteorologyCalculator.py b/utils/SnapPy/Snappy/EcMeteorologyCalculator.py
index adc473b0..862f2f57 100644
--- a/utils/SnapPy/Snappy/EcMeteorologyCalculator.py
+++ b/utils/SnapPy/Snappy/EcMeteorologyCalculator.py
@@ -1,29 +1,32 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Oct 24, 2016
@author: heikok
-"""
+'''
-from datetime import datetime
+from datetime import datetime, timedelta
+from glob import iglob
+import math
import os
import subprocess
+import time
from Snappy.Resources import Resources, MetModel
import Snappy.MeteorologyCalculator
@@ -31,19 +34,17 @@
class ECDataNotAvailableException(Exception):
def __init__(self, value):
- """exception having some kind of documention in args[0]"""
+ '''exception having some kind of documention in args[0]'''
self.parameter = value
-
def __str__(self):
return repr(self.parameter)
class EcMeteorologyCalculator(Snappy.MeteorologyCalculator.MeteorologyCalculator):
- """Calculate ec-meteorology"""
-
+ '''Calculate ec-meteorology'''
@staticmethod
def findECGlobalData(dtime: datetime):
- """Static method to find the closest global ec dataset earlier than dtime.
+ '''Static method to find the closest global ec dataset earlier than dtime.
Args:
dtime: datetime object with a start-time, which should be included in the dataset
@@ -54,73 +55,64 @@ def findECGlobalData(dtime: datetime):
Raises:
ECDataNotAvailableException: no data for the dtime can be found
- """
- return Snappy.MeteorologyCalculator.MeteorologyCalculator.findGlobalData(
- EcMeteorologyCalculator.getGlobalMeteoResources(), dtime
- )
+ '''
+ return Snappy.MeteorologyCalculator.MeteorologyCalculator.findGlobalData(EcMeteorologyCalculator.getGlobalMeteoResources(), dtime)
+
@staticmethod
def getGlobalMeteoResources():
- """retrieve the GlobalMeteoResources from internal resources"""
+ '''retrieve the GlobalMeteoResources from internal resources'''
ecres = Resources()
res = Snappy.MeteorologyCalculator.GlobalMeteoResource()
res.indirs = ecres.getMetGlobalInputDirs(MetModel.NrpaEC0p1Global)
res.pathglob = "ec_atmo_0_1deg_????????T??????Z_3h.nc"
res.pathptime = "ec_atmo_0_1deg_%Y%m%dT%H%M%SZ_3h.nc"
- res.path_grace_period_sec = (
- 2 * 60
- ) # 2min grace to ensure lustre cross-dir mv finishes
+ res.path_grace_period_sec = 2*60 # 2min grace to ensure lustre cross-dir mv finishes
res.outputdir = ecres.getSnapOutputDir()
res.output_filename_pattern = ecres.EC_FILENAME_PATTERN
res.domainHeight = ecres.ecDomainHeight
res.domainWidth = ecres.ecDomainWidth
res.domainDeltaX = ecres.ecDomainRes
res.domainDeltaY = ecres.ecDomainRes
- res.timeoffset = (
- 3 # required offset between reference-time and first useful startup-time
- )
+ res.timeoffset = 3 # required offset between reference-time and first useful startup-time
return res
- # def __init__(self, res: Snappy.MeteorologyCalculator.GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY):
- # super(res, dtime, domainCenterX, domainCenterY)
+# def __init__(self, res: Snappy.MeteorologyCalculator.GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY):
+# super(res, dtime, domainCenterX, domainCenterY)
def add_expected_files(self, date):
self.files = []
self.optFiles = []
- for i in (0, 1, 2):
- self.files.append(
- os.path.join(
- self.outputdir,
- self.res.output_filename_pattern.format(
- year=date.year, month=date.month, day=date.day, dayoffset=i
- ),
- )
- )
- for i in (3, 4, 5):
- self.optFiles.append(
- os.path.join(
- self.outputdir,
- self.res.output_filename_pattern.format(
- year=date.year, month=date.month, day=date.day, dayoffset=i
- ),
- )
- )
+ for i in (0,1,2):
+ self.files.append(os.path.join(self.outputdir,
+ self.res.output_filename_pattern.format(year=date.year,
+ month=date.month,
+ day=date.day,
+ dayoffset=i)))
+ for i in (3,4,5):
+ self.optFiles.append(os.path.join(self.outputdir,
+ self.res.output_filename_pattern.format(year=date.year,
+ month=date.month,
+ day=date.day,
+ dayoffset=i)))
return
- def calc(self, proc=None):
- """run the calculation of ec-data if required.
+ def calc(self, proc=None):
+ '''run the calculation of ec-data if required.
+
Args:
proc -- A QProcess, which will be used to run a longer process in the background.
STDERR/STDOUT and signal-handler should be set. If proc is None, the
subprocess will be run in the current-process. If proc is set, the caller
- needs to wait for the proc to finish before calling other methods of this object"""
- if not self.must_calc():
+ needs to wait for the proc to finish before calling other methods of this object
+'''
+ if (not self.must_calc()):
return
- # if 'MODULESHOME' not in os.environ:
- # raise ECDataNotAvailableException("unable to load module")
+# if 'MODULESHOME' not in os.environ:
+# raise ECDataNotAvailableException("unable to load module")
- precommand = """#! /bin/bash
+ precommand = '''#! /bin/bash
source /etc/profile.d/modules.sh
release=$(lsb_release --codename --short)
if [[ "$release" == "Ootpa" ]]; then
@@ -145,51 +137,38 @@ def calc(self, proc=None):
echo "Preprocessing 3days EC meteorology, please wait ca. 15min"
ECDIS_PARALLEL=0 NREC_DAY_MIN=2 NDAYS_MAX=3 DOMAIN=VARIABLE LON_DEF={lon0}.,{dlon},{nx} LAT_DEF={lat0}.,{dlat},{ny} ECDIS={globalfile} OUTDIR={outdir} ECDIS_TMPDIR=$WORKDIR $ECDIS_MODULE_PATH/ecdis4cwf.sh
rm {outdir}/running
-"""
- command = precommand.format(
- year=self.date.year,
- month=self.date.month,
- day=self.date.day,
- utc=self.date.hour,
- outdir=self.outputdir,
- lon0=self.lon0,
- lat0=self.lat0,
- dlon=self.res.domainDeltaX,
- dlat=self.res.domainDeltaY,
- nx=round(self.res.domainWidth / self.res.domainDeltaX) + 1,
- ny=round(self.res.domainHeight / self.res.domainDeltaY) + 1,
- globalfile=self.globalfile,
- )
+'''
+ command = precommand.format(year=self.date.year,
+ month=self.date.month,
+ day=self.date.day,
+ utc=self.date.hour,
+ outdir=self.outputdir,
+ lon0=self.lon0,
+ lat0=self.lat0,
+ dlon=self.res.domainDeltaX,
+ dlat=self.res.domainDeltaY,
+ nx=round(self.res.domainWidth/self.res.domainDeltaX)+1,
+ ny=round(self.res.domainHeight/self.res.domainDeltaY)+1,
+ globalfile=self.globalfile,
+ )
scriptFile = os.path.join(self.outputdir, "command.sh")
- with open(scriptFile, "w") as script:
+ with open(scriptFile, 'w') as script:
script.write(command)
if proc is None:
- subprocess.call(["/bin/bash", scriptFile])
+ subprocess.call(['/bin/bash', scriptFile])
else:
- self.proc = proc # make sure proc lives long enough
- proc.start("/bin/bash", [scriptFile])
+ self.proc = proc # make sure proc lives long enough
+ proc.start('/bin/bash', [scriptFile])
return
-
if __name__ == "__main__":
- print(
- EcMeteorologyCalculator.findECGlobalData(
- datetime.strptime("2020-04-29T00", "%Y-%m-%dT%H")
- )
- )
+ print(EcMeteorologyCalculator.findECGlobalData(datetime.strptime("2020-04-29T00", "%Y-%m-%dT%H")))
try:
- EcMeteorologyCalculator.findECGlobalData(
- datetime.strptime("2010-10-24T00", "%Y-%m-%dT%H")
- )
+ EcMeteorologyCalculator.findECGlobalData(datetime.strptime("2010-10-24T00", "%Y-%m-%dT%H"))
except Exception as e:
print(e.args[0])
- # print(EcMeteorologyCalculator(Resources(), datetime.strptime("2016-10-24T00", "%Y-%m-%dT%H"), 63, 42, None))
- ecmet = EcMeteorologyCalculator(
- EcMeteorologyCalculator.getGlobalMeteoResources(),
- datetime.strptime("2020-04-29T00", "%Y-%m-%dT%H"),
- -159,
- 20,
- ) # hawaii
+# print(EcMeteorologyCalculator(Resources(), datetime.strptime("2016-10-24T00", "%Y-%m-%dT%H"), 63, 42, None))
+ ecmet = EcMeteorologyCalculator(EcMeteorologyCalculator.getGlobalMeteoResources(), datetime.strptime("2020-04-29T00", "%Y-%m-%dT%H"), -159, 20) # hawaii
print("recalc: ", ecmet.must_calc())
diff --git a/utils/SnapPy/Snappy/ICONMeteorologyCalculator.py b/utils/SnapPy/Snappy/ICONMeteorologyCalculator.py
index 726c1913..9a97d6be 100644
--- a/utils/SnapPy/Snappy/ICONMeteorologyCalculator.py
+++ b/utils/SnapPy/Snappy/ICONMeteorologyCalculator.py
@@ -1,94 +1,88 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2020 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Oct 24, 2016
@author: heikok
-"""
+'''
-from datetime import datetime
+from datetime import datetime, timedelta
+from glob import iglob
+import math
import os
import subprocess
+import time
from Snappy.Resources import Resources, MetModel
import Snappy.MeteorologyCalculator
-
class ICONMeteorologyCalculator(Snappy.MeteorologyCalculator.MeteorologyCalculator):
- """Calculate dwd icon-meteorology"""
+ '''Calculate dwd icon-meteorology'''
@staticmethod
def getGlobalMeteoResources():
- """retrieve the GlobalMeteoResources from internal resources"""
+ '''retrieve the GlobalMeteoResources from internal resources'''
gres = Resources()
res = Snappy.MeteorologyCalculator.GlobalMeteoResource()
res.indirs = gres.getMetGlobalInputDirs(MetModel.Icon0p25Global)
# icon_20200427T00Z.nc
res.pathglob = "icon_????????T??Z.nc"
res.pathptime = "icon_%Y%m%dT%HZ.nc"
- res.path_grace_period_sec = 0 # files are written atomic (rsync)
+ res.path_grace_period_sec = 0 # files are written atomic (rsync)
res.outputdir = gres.getSnapOutputDir()
- res.output_filename_pattern = gres.MET_FILENAME_PATTERN[
- MetModel.Icon0p25Global
- ] # keeping filename for extracted data
- res.domainHeight = gres.ecDomainHeight # reuse domainHeight/Width
+ res.output_filename_pattern = gres.MET_FILENAME_PATTERN[MetModel.Icon0p25Global] # keeping filename for extracted data
+ res.domainHeight = gres.ecDomainHeight # reuse domainHeight/Width
res.domainWidth = gres.ecDomainWidth
res.domainDeltaX = 0.25
res.domainDeltaY = 0.25
- res.timeoffset = (
- 0 # required offset between reference-time and first useful startup-time
- )
+ res.timeoffset = 0 # required offset between reference-time and first useful startup-time
return res
- # def __init__(self, res: Snappy.MeteorologyCalculator.GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY):
- # super(res, dtime, domainCenterX, domainCenterY)
+# def __init__(self, res: Snappy.MeteorologyCalculator.GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY):
+# super(res, dtime, domainCenterX, domainCenterY)
def add_expected_files(self, date):
self.files = []
self.optFiles = []
# only one file expected
- self.files.append(
- os.path.join(
- self.outputdir,
- self.res.output_filename_pattern.format(
- year=date.year,
- month=date.month,
- day=date.day,
- UTC=date.hour,
- resdir=Resources().directory,
- ),
- )
- )
+ self.files.append(os.path.join(self.outputdir,
+ self.res.output_filename_pattern.format(year=date.year,
+ month=date.month,
+ day=date.day,
+ UTC=date.hour,
+ resdir=Resources().directory)))
return
- def calc(self, proc=None):
- """run the calculation of ec-data if required.
+ def calc(self, proc=None):
+ '''run the calculation of ec-data if required.
+
Args:
proc -- A QProcess, which will be used to run a longer process in the background.
STDERR/STDOUT and signal-handler should be set. If proc is None, the
subprocess will be run in the current-process. If proc is set, the caller
- needs to wait for the proc to finish before calling other methods of this object"""
- if not self.must_calc():
+ needs to wait for the proc to finish before calling other methods of this object
+'''
+ if (not self.must_calc()):
return
- precommand = """#! /bin/bash
+ precommand = '''#! /bin/bash
cd {outputdir} || exit 1
echo "Preprocessing 5-7days icon meteorology, please wait ca. 3min"
echo "MET-Input: {globalfile}"
@@ -111,66 +105,49 @@ def calc(self, proc=None):
--output.type=nc4 \
&& mv $tmpfile {outputfile}
rm {outputdir}/running
-"""
+'''
# pressurelevel only defined for certain timesteps, different for each forecast-reference-time
- if self.globalfile.endswith("T00Z.nc") or self.globalfile.endswith("T12Z.nc"):
- timeStepList = "0,2,...,16,20,24,26,28,30,32,33,34,35,36,37"
- elif self.globalfile.endswith("T06Z.nc"):
- timeStepList = "0,1,...,33"
- elif self.globalfile.endswith("T18Z.nc"):
- timeStepList = "0,2,...,16,17,21,22,...,25"
+ if self.globalfile.endswith('T00Z.nc') or self.globalfile.endswith('T12Z.nc'):
+ timeStepList = '0,2,...,16,20,24,26,28,30,32,33,34,35,36,37'
+ elif self.globalfile.endswith('T06Z.nc'):
+ timeStepList = '0,1,...,33'
+ elif self.globalfile.endswith('T18Z.nc'):
+ timeStepList = '0,2,...,16,17,21,22,...,25'
else:
- print(
- f"ERROR: don't understand file {self.globalfile}, should end with TXXZ.nc"
- )
- command = precommand.format(
- resdir=Resources().directory,
- xAxisValues="{},{},...,{}".format(
- self.lon0,
- self.lon0 + self.res.domainDeltaX,
- self.lon0 + self.res.domainWidth,
- ),
- yAxisValues="{},{},...,{}".format(
- self.lat0,
- self.lat0 + self.res.domainDeltaY,
- self.lat0 + self.res.domainHeight,
- ),
- globalfile=self.globalfile,
- timeStepList=timeStepList,
- outputfile=self.files[0],
- outputdir=self.outputdir,
- )
+ print(f"ERROR: don't understand file {self.globalfile}, should end with TXXZ.nc")
+ command = precommand.format(resdir=Resources().directory,
+ xAxisValues="{},{},...,{}".format(self.lon0,
+ self.lon0+self.res.domainDeltaX,
+ self.lon0+self.res.domainWidth),
+ yAxisValues="{},{},...,{}".format(self.lat0,
+ self.lat0+self.res.domainDeltaY,
+ self.lat0+self.res.domainHeight),
+ globalfile=self.globalfile,
+ timeStepList=timeStepList,
+ outputfile=self.files[0],
+ outputdir=self.outputdir
+ )
scriptFile = os.path.join(self.outputdir, "command.sh")
- with open(scriptFile, "w") as script:
+ with open(scriptFile, 'w') as script:
script.write(command)
if proc is None:
- subprocess.call(["/bin/bash", scriptFile])
+ subprocess.call(['/bin/bash', scriptFile])
else:
- self.proc = proc # make sure proc lives long enough
- proc.start("/bin/bash", [scriptFile])
+ self.proc = proc # make sure proc lives long enough
+ proc.start('/bin/bash', [scriptFile])
return
-
if __name__ == "__main__":
mydate = datetime.strptime("2020-05-05T00", "%Y-%m-%dT%H")
- print(
- ICONMeteorologyCalculator.findGlobalData(
- ICONMeteorologyCalculator.getGlobalMeteoResources(), mydate
- )
- )
+ print(ICONMeteorologyCalculator.findGlobalData(ICONMeteorologyCalculator.getGlobalMeteoResources(), mydate))
try:
- ICONMeteorologyCalculator.findGlobalData(
- ICONMeteorologyCalculator.getGlobalMeteoResources(),
- datetime.strptime("2010-10-24T00", "%Y-%m-%dT%H"),
- )
+ ICONMeteorologyCalculator.findGlobalData(ICONMeteorologyCalculator.getGlobalMeteoResources(), datetime.strptime("2010-10-24T00", "%Y-%m-%dT%H"))
except Exception as e:
print(e.args[0])
- # print(EcMeteorologyCalculator(Resources(), datetime.strptime("2016-10-24T00", "%Y-%m-%dT%H"), 63, 42, None))
- met = ICONMeteorologyCalculator(
- ICONMeteorologyCalculator.getGlobalMeteoResources(), mydate, -159, 20
- ) # hawaii
+# print(EcMeteorologyCalculator(Resources(), datetime.strptime("2016-10-24T00", "%Y-%m-%dT%H"), 63, 42, None))
+ met = ICONMeteorologyCalculator(ICONMeteorologyCalculator.getGlobalMeteoResources(), mydate, -159, 20) # hawaii
print("recalc: ", met.must_calc())
met.calc()
if met.must_calc() is True:
diff --git a/utils/SnapPy/Snappy/Isotopes.py b/utils/SnapPy/Snappy/Isotopes.py
index b93b8e1c..a1eef8b5 100644
--- a/utils/SnapPy/Snappy/Isotopes.py
+++ b/utils/SnapPy/Snappy/Isotopes.py
@@ -1,12 +1,10 @@
import os
-
class Isotopes:
_isoByName = None
_isoById = None
-
def __new__(cls):
- """Isotopes singleton data"""
+ '''Isotopes singleton data'''
if cls._isoById is None:
cls._isoById = dict()
cls._isoByName = dict()
@@ -31,23 +29,23 @@ def __new__(cls):
cls._isoByName[isotope["isotope"]] = isotope
obj = object.__new__(cls)
return obj
-
+
def byId(self, id):
- """
- id is a number in the isotopes file
- return a dict with isotopte-name, type=0,1,2 (nobelgas, gas, aerosol) and decayrate (/s)
- """
+ '''
+ id is a number in the isotopes file
+        return a dict with isotope-name, type=0,1,2 (noble gas, gas, aerosol) and decayrate (/s)
+ '''
return self._isoById[id]
def byName(self, name):
- """
- @param name is a isotope name like Cs137
- return a dict with isotopte-name, type=0,1,2 (nobelgas, gas, aerosol) and decayrate (/s)
- """
+ '''
+ @param name is a isotope name like Cs137
+        return a dict with isotope-name, type=0,1,2 (noble gas, gas, aerosol) and decayrate (/s)
+ '''
return self._isoByName[name]
if __name__ == "__main__":
isotopes = Isotopes()
- print(isotopes.byName("Cs137"))
- print(isotopes.byName("I131"))
+ print(isotopes.byName('Cs137'))
+ print(isotopes.byName('I131'))
diff --git a/utils/SnapPy/Snappy/MainBrowserWindow.py b/utils/SnapPy/Snappy/MainBrowserWindow.py
index 8eed5960..9523547d 100644
--- a/utils/SnapPy/Snappy/MainBrowserWindow.py
+++ b/utils/SnapPy/Snappy/MainBrowserWindow.py
@@ -1,17 +1,17 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
@@ -24,7 +24,7 @@
*******************************************************************
"""
-from PyQt5 import QtCore, QtGui, QtWidgets, QtWebKitWidgets
+from PyQt5 import QtCore, QtGui, QtWidgets, QtWebKit, QtWebKitWidgets, QtNetwork
from builtins import str
import sys
@@ -41,19 +41,18 @@ def acceptNavigationRequest(self, frame, req, nav_type):
self.formSubmitted.emit(req.url())
return False
else:
- return super(StartWebPage, self).acceptNavigationRequest(
- frame, req, nav_type
- )
+ return super(StartWebPage, self).acceptNavigationRequest(frame, req, nav_type)
class MainBrowserWindow(QtWidgets.QMainWindow):
+
def __init__(self):
"""
- Create main window with browser and a button
+ Create main window with browser and a button
"""
super(MainBrowserWindow, self).__init__()
- self.resize(960, 1024)
+ self.resize(960,1024)
self.centralwidget = QtWidgets.QWidget(self)
self.mainLayout = QtWidgets.QHBoxLayout(self.centralwidget)
@@ -69,9 +68,9 @@ def __init__(self):
self.bt_back.setHidden(True)
self.bt_ahead.setHidden(True)
self.tb_url.setHidden(True)
- # self.horizontalLayout.addWidget(self.bt_back)
- # self.horizontalLayout.addWidget(self.bt_ahead)
- # self.horizontalLayout.addWidget(self.tb_url)
+# self.horizontalLayout.addWidget(self.bt_back)
+# self.horizontalLayout.addWidget(self.bt_ahead)
+# self.horizontalLayout.addWidget(self.tb_url)
self.gridLayout.addLayout(self.horizontalLayout)
self.webview = QtWebKitWidgets.QWebView()
@@ -86,9 +85,9 @@ def __init__(self):
self.set_form_handler(self._default_form_handler)
- # self.default_url = "https://dokit.met.no/fou/kl/prosjekter/eemep/esnap_userdoc"
- # self.tb_url.setText(self.default_url)
- # self.browse()
+ #self.default_url = "https://dokit.met.no/fou/kl/prosjekter/eemep/esnap_userdoc"
+ #self.tb_url.setText(self.default_url)
+ #self.browse()
def browse(self):
"""browse an url"""
@@ -97,12 +96,13 @@ def browse(self):
self.webview.load(QtCore.QUrl(url))
self.webview.show()
+
def url_changed(self, url):
- """Triggered when the url is changed"""
+ """ Triggered when the url is changed """
self.tb_url.setText(url.toString())
def set_html(self, text: str):
- """set html string"""
+ """ set html string"""
self.tb_url.setText("")
self.web_page = StartWebPage()
self.webview.setPage(self.web_page)
@@ -115,22 +115,24 @@ def _default_form_handler(dict):
print(str.format("{0} => {1}", key, value))
def set_form_handler(self, handler):
- """the form handler should accept a dictionary with query results as input"""
+ """ the form handler should accept a dictionary with query results as input """
self.form_handler = handler
def evaluate_javaScript(self, jscript):
- self.webview.page().mainFrame().evaluateJavaScript(jscript)
+ self.webview.page().mainFrame().evaluateJavaScript(jscript);
def _handle_formSubmitted(self, url):
# I don't manage to get the right query strings from the web-page
- print("handleFromSubmitted:" + url.toString())
- self.form_handler(QtCore.QUrlQuery(url).queryItems(QtCore.QUrl.FullyDecoded))
+ print("handleFromSubmitted:"+url.toString());
+ self.form_handler(QtCore.QUrlQuery(url).queryItems(QtCore.QUrl.FullyDecoded));
+
+
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
main = MainBrowserWindow()
- html = """
+ html = '''
-"""
+'''
main.set_html(html)
main.show()
sys.exit(app.exec_())
diff --git a/utils/SnapPy/Snappy/MeteorologyCalculator.py b/utils/SnapPy/Snappy/MeteorologyCalculator.py
index 889bdb30..8264a13f 100644
--- a/utils/SnapPy/Snappy/MeteorologyCalculator.py
+++ b/utils/SnapPy/Snappy/MeteorologyCalculator.py
@@ -1,101 +1,95 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Oct 24, 2016
@author: heikok
-"""
+'''
from datetime import datetime, timedelta
import abc
from glob import iglob
import math
import os
+import subprocess
import time
-
+from Snappy.Resources import Resources
class GlobalMeteoResource:
- """containter for information needed by MeteorologyCalculator containing:
-
- indirs: inputdirectories
- outputdir: output directory
- output_filename_pattern: pattern of output file
- pathglob: glob for files in inputdirectories, e.g. ec_atmo_0_1deg_????????T??????Z_3h.nc
- pathptime: strptime for files, e.g. ec_atmo_0_1deg_%Y%m%dT%H%M%SZ_3h.nc
- path_grace_period_sec: seconds to wait before a file is used (i.e. no longer written)
- domainHeight: total y-Axis length of domain
- domainWidth: total x-Axis lenght of domain
- domainDeltaX: x-resolution of domain
- domainDeltaY: y-resolution of domain
- """
-
+    '''container for information needed by MeteorologyCalculator containing:
+
+ indirs: inputdirectories
+ outputdir: output directory
+ output_filename_pattern: pattern of output file
+ pathglob: glob for files in inputdirectories, e.g. ec_atmo_0_1deg_????????T??????Z_3h.nc
+ pathptime: strptime for files, e.g. ec_atmo_0_1deg_%Y%m%dT%H%M%SZ_3h.nc
+ path_grace_period_sec: seconds to wait before a file is used (i.e. no longer written)
+ domainHeight: total y-Axis length of domain
+    domainWidth: total x-Axis length of domain
+ domainDeltaX: x-resolution of domain
+ domainDeltaY: y-resolution of domain
+ '''
indirs = []
outputdir = ""
output_filename_pattern = ""
pathglob = ""
pathptime = ""
- path_grace_period_sec = 60 * 15
+ path_grace_period_sec = 60*15
domainHeight = 10
domainWidth = 10
domainDeltaX = 0.1
domainDeltaY = 0.1
- timeoffset = (
- 0 # required offset between reference-time and first useful startup-time
- )
+ timeoffset = 0 # required offset between reference-time and first useful startup-time
class MeteoDataNotAvailableException(Exception):
def __init__(self, value):
- """exception having some kind of documention in args[0]"""
+            '''exception having some kind of documentation in args[0]'''
self.parameter = value
-
def __str__(self):
return repr(self.parameter)
class MeteorologyCalculator(abc.ABC):
- """Base-class to pre-calculate/extract meteorology"""
-
+ '''Base-class to pre-calculate/extract meteorology'''
@staticmethod
- def findAllGlobalData(res: GlobalMeteoResource):
- """Static method to find all global dataset.
+ def findAllGlobalData(res: GlobalMeteoResource):
+ '''Static method to find all global dataset.
Args:
dtime: datetime object with a start-time, which should be included in the dataset
Returns:
A list of tuples with [(forecast-time, file)]
- """
- timesFiles = [] # tuples with datetime, file
+ '''
+ timesFiles = [] # tuples with datetime, file
for inDir in res.indirs:
for iFile in iglob(os.path.join(inDir, res.pathglob)):
statinfo = os.stat(iFile)
- if statinfo.st_mtime < (
- time.time() - res.path_grace_period_sec
- ): # file hasn't been changed in x sec
+ if statinfo.st_mtime < (time.time() - res.path_grace_period_sec): # file hasn't been changed in x sec
dateFile = datetime.strptime(os.path.basename(iFile), res.pathptime)
timesFiles.append((dateFile, iFile))
return timesFiles
@staticmethod
def findGlobalData(res: GlobalMeteoResource, dtime: datetime):
- """Method to find the global dataset with the latest forecast time which includes dtime.
+ '''Method to find the global dataset with the latest forecast time which includes dtime.
Args:
res: resources from getGlobalMeteoResources
@@ -106,45 +100,39 @@ def findGlobalData(res: GlobalMeteoResource, dtime: datetime):
Raises:
MeteoDataNotAvailableException: no data for the dtime can be found
- """
+ '''
timesFiles = MeteorologyCalculator.findAllGlobalData(res)
lastTimeFile = (None, None)
for timeFile in sorted(timesFiles, key=lambda t: t[0]):
- if timeFile[0] <= dtime:
+ if (timeFile[0] <= dtime):
lastTimeFile = timeFile
else:
break
- if lastTimeFile[0] is None:
- raise MeteoDataNotAvailableException(
- "no input data in {dirs} for {time}: ".format(
- dirs=res.indirs, time=dtime
- )
- )
+ if (lastTimeFile[0] is None):
+ raise MeteoDataNotAvailableException("no input data in {dirs} for {time}: ".format(dirs=res.indirs, time=dtime))
return lastTimeFile
+
@abc.abstractstaticmethod
def getGlobalMeteoResources():
- """retrieve the GlobalMeteoResources from internal resources"""
+ '''retrieve the GlobalMeteoResources from internal resources'''
pass
+
def getLat0(latCenter, domainHeight):
# get a domain starting every 10th degree
- lat0 = math.floor((latCenter - (domainHeight / 2.0)) / 10.0) * 10
- if lat0 < -80:
- lat0 = -89
- if lat0 + domainHeight > 89:
- lat0 = 89 - domainHeight
+ lat0 = math.floor((latCenter-(domainHeight/2.))/10.)*10
+ if (lat0 < -80): lat0 = -89
+ if (lat0+domainHeight > 89): lat0 = 89 - domainHeight
return lat0
-
def getLon0(lonCenter, domainWidth):
# get a domain starting every 10th degree
- lon0 = math.floor((lonCenter - (domainWidth / 2.0)) / 10.0) * 10
+ lon0 = math.floor((lonCenter-(domainWidth/2.))/10.)*10
return lon0
- def __init__(
- self, res: GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY
- ):
- """Calculate the ec-meteorology unless it exists
+
+ def __init__(self, res: GlobalMeteoResource, dtime: datetime, domainCenterX, domainCenterY):
+ '''Calculate the ec-meteorology unless it exists
Args:
res: GlobalMeteoResource object
@@ -154,91 +142,81 @@ def __init__(
Raises:
MeteoDataNotAvailableException: no data for the dtime can be found
- """
- self.proc = None # storage for background-process
+ '''
+ self.proc = None # storage for background-process
self.res = res
- lastDateFile = self.findGlobalData(res, dtime - timedelta(hours=res.timeoffset))
+ lastDateFile = self.findGlobalData(res, dtime-timedelta(hours=res.timeoffset))
self.date = lastDateFile[0]
self.globalfile = lastDateFile[1]
utc = lastDateFile[0].hour
# domain every 10th degree
- lat0 = math.floor((domainCenterY - (res.domainHeight / 2.0)) / 10.0) * 10
- if lat0 < -80:
- lat0 = -89
- if lat0 + res.domainHeight > 89:
- lat0 = 89 - res.domainHeight
+ lat0 = math.floor((domainCenterY-(res.domainHeight/2.))/10.)*10
+ if (lat0 < -80): lat0 = -89
+ if (lat0+res.domainHeight > 89): lat0 = 89 - res.domainHeight
# snap can only cross date-line when both start and position are negative or positive
- lon0 = math.floor((domainCenterX - (res.domainWidth / 2.0)) / 10.0) * 10
+ lon0 = math.floor((domainCenterX-(res.domainWidth/2.))/10.)*10
self.lat0 = int(lat0)
self.lon0 = int(lon0)
- self.outputdir = os.path.join(
- res.outputdir,
- "NRPA_LON{x}_LAT{y}_{utc:02d}".format(x=self.lon0, y=self.lat0, utc=utc),
- )
+ self.outputdir = os.path.join(res.outputdir, "NRPA_LON{x}_LAT{y}_{utc:02d}".format(x=self.lon0, y=self.lat0, utc=utc))
# try to avoid conflicting processes (not 100% save)
i = 1
- while os.path.isfile(os.path.join(self.outputdir, "running")):
- self.outputdir = os.path.join(
- res.outputdir, "NRPA_TEMP_{utc:02d}_{i}".format(utc=utc, i=i)
- )
- i += 1
+ while (os.path.isfile(os.path.join(self.outputdir, "running"))):
+ self.outputdir = os.path.join(res.outputdir, "NRPA_TEMP_{utc:02d}_{i}".format(utc=utc, i=i))
+ i+=1
- if not os.path.exists(self.outputdir):
+ if (not os.path.exists(self.outputdir)):
os.makedirs(self.outputdir)
-
+
self.add_expected_files(lastDateFile[0])
@abc.abstractmethod
def add_expected_files(self, date):
- """add the expected files to files (must exist) and optFiles (don't need to exist).
- This method must be overwritten."""
- assert False
+ '''add the expected files to files (must exist) and optFiles (don't need to exist).
+ This method must be overwritten.'''
+ assert(False)
pass
def get_meteorology_files(self):
- """return the meteorology files"""
+ '''return the meteorology files'''
if self.must_calc():
- raise MeteoDataNotAvailableException(
- "unable to create meteo-data for {year}-{month}-{day} in {dir}".format(
- year=self.date.year,
- month=self.date.month,
- day=self.date.day,
- dir=self.outputdir,
- )
- )
+ raise MeteoDataNotAvailableException("unable to create meteo-data for {year}-{month}-{day} in {dir}".
+ format(year=self.date.year,
+ month=self.date.month,
+ day=self.date.day,
+ dir=self.outputdir))
files = self.files
for f in self.optFiles:
- if os.path.isfile(f):
+ if (os.path.isfile(f)):
files.append(f)
return files
def get_grid_startX_Y(self):
- """return a tuple with x0 and y0"""
+ '''return a tuple with x0 and y0'''
return (self.lon0, self.lat0)
def must_calc(self):
- """check if calculation is required or has been done earlier"""
+ '''check if calculation is required or has been done earlier'''
recalc = False
for f in self.files:
- if not os.path.isfile(f):
+ if (not os.path.isfile(f)):
recalc = True
return recalc
@abc.abstractmethod
def calc(self, proc=None):
- """abstract baseclass to run the calculation of meteo-data if required. Should check if self.must_calc() at the beginning.
-
+ '''abstract baseclass to run the calculation of meteo-data if required. Should check if self.must_calc() at the beginning.
+
Args:
proc -- A QProcess, which will be used to run a longer process in the background.
STDERR/STDOUT and signal-handler should be set. If proc is None, the
subprocess will be run in the current-process. If proc is set, the caller
- needs to wait for the proc to finish before calling other methods of this object"""
- # if (not self.must_calc()):
- # return
+ needs to wait for the proc to finish before calling other methods of this object
+'''
+# if (not self.must_calc()):
+# return
pass
-
if __name__ == "__main__":
pass
diff --git a/utils/SnapPy/Snappy/Resources.py b/utils/SnapPy/Snappy/Resources.py
index b64ebb93..b1b2df37 100644
--- a/utils/SnapPy/Snappy/Resources.py
+++ b/utils/SnapPy/Snappy/Resources.py
@@ -51,7 +51,6 @@ def __eq__(self, other):
def __hash__(self):
return self.value.__hash__()
-
class Resources(ResourcesCommon):
"""
Read the resources and combine them
@@ -71,14 +70,16 @@ class Resources(ResourcesCommon):
MetModel.NrpaEC0p1Global: [
"{LUSTREDIR}/project/metproduction/products/ecmwf/nc/"
],
- MetModel.EC0p1Global: ["{LUSTREDIR}/project/metproduction/products/ecmwf/nc/"],
+ MetModel.EC0p1Global: [
+ "{LUSTREDIR}/project/metproduction/products/ecmwf/nc/"
+ ],
MetModel.Icon0p25Global: ["{LUSTREDIR}/project/metproduction/products/icon/"],
}
_MET_INPUTDIRS = {
MetModel.Meps2p5: [
"{LUSTREDIR}/immutable/archive/projects/metproduction/MEPS/",
- "{LUSTREDIR}/project/fou/kl/cerad/Projects/2022_ArcticReihn/Meteo/MEPS/",
+ "{LUSTREDIR}/project/fou/kl/cerad/Projects/2022_ArcticReihn/Meteo/MEPS/"
],
MetModel.GfsGribFilter: ["/disk1/tmp/"],
}
@@ -164,7 +165,7 @@ def getIconPath(self):
return os.path.join(self.directory, "radioMapIcon.png")
def getIsotopes(self):
- """return a dictionary of isotope-ids mapping to a dictionary with isotope,type and decay"""
+ """ return a dictionary of isotope-ids mapping to a dictionary with isotope,type and decay"""
isotopes = dict()
with open(
os.path.join(self.directory, "isotope_list.txt"), mode="r", encoding="UTF-8"
@@ -184,10 +185,10 @@ def getIsotopes(self):
return isotopes
def isotopes2isoIds(self, isotopes: list[str | int]) -> list[int]:
- """
+ '''
translate a list of isotopes, i.e. ['Cs-137', ...] or ['Cs137', ...] or ['17', ...]
to argos isotope id's
- """
+ '''
retval = []
allIsos = self.getIsotopes()
for iso in isotopes:
@@ -199,9 +200,9 @@ def isotopes2isoIds(self, isotopes: list[str | int]) -> list[int]:
except Exception:
pass
if isoId == -1:
- iso = iso.replace("-", "")
+ iso = iso.replace('-', '')
for iId, isoDict in allIsos.items():
- if iso == isoDict["isotope"]:
+ if iso == isoDict['isotope']:
isoId = iId
break
if isoId == -1:
@@ -209,17 +210,19 @@ def isotopes2isoIds(self, isotopes: list[str | int]) -> list[int]:
retval.append(isoId)
return retval
+
def isotopes2snapinput(self, isotopeIds, add_DPUI=True):
"""Read a list of isotopeIds and return a text-block to be used for a snap.input file, like
- COMPONENT= Cs137
- RADIOACTIVE.DECAY.ON
- HALF.LIFETIME.YEARS= 30
- DRY.DEP.ON
- WET.DEP.ON
- RADIUS.MICROMETER=0.55
- DENSITY.G/CM3=2.3
- GRAVITY.FIXED.M/S=0.0002
- FIELD.IDENTIFICATION=01"""
+COMPONENT= Cs137
+RADIOACTIVE.DECAY.ON
+HALF.LIFETIME.YEARS= 30
+DRY.DEP.ON
+WET.DEP.ON
+RADIUS.MICROMETER=0.55
+DENSITY.G/CM3=2.3
+GRAVITY.FIXED.M/S=0.0002
+FIELD.IDENTIFICATION=01
+"""
if add_DPUI:
dosecoeff = self.getDoseCoefficients()
else:
@@ -266,7 +269,7 @@ def isotopes2snapinput(self, isotopeIds, add_DPUI=True):
DPUI = dosecoeff.DPUI(iso["isotope"], "particulate")
else:
raise Exception(
- "Error, unknown type '{0}' for isotope '{1}'".format(
+ "Error, unknown type '{1}' for isotope '{2}'".format(
iso["type"], iso["isotope"]
)
)
@@ -333,7 +336,7 @@ def _getGribWriterConfig(self, isoIds, setFillValue=True):
) as ncmlFH:
ncmlOut = ncmlFH.read()
ncmlOut = ncmlOut.format(variables="\n".join(varFills))
-
+
return {
"extracts": extracts,
"xml": xmlOut,
@@ -373,16 +376,14 @@ def readNPPs(
nppsFile.close()
return OrderedDict(sorted(npps.items(), key=lambda t: t[0].lower()))
- def readRadnett(
- self,
- ):
+ def readRadnett(self,):
stations = OrderedDict()
with open(
os.path.join(os.path.dirname(__file__), "resources/radnett.csv"),
mode="r",
encoding="UTF-8",
) as f:
- degree_minute_regex = re.compile(r"([0-9]+)°\s([0-9]+)’\s[NØ]")
+ degree_minute_regex = re.compile("([0-9]+)°\s([0-9]+)’\s[NØ]")
for line in f:
if line.startswith("#"):
continue
@@ -651,11 +652,12 @@ def getDoseCoefficients(self):
dosecoeffs = read_dosecoefficients_icrp.DoseCoefficientsICRP(
os.path.join(self.directory, "1-s2.0-S0146645313000110-mmc1.zip")
)
- except Exception:
+ except Exception as e:
dosecoeffs = None
return dosecoeffs
+
# setting bitmapCompress as default to False
# fimex drops all fields which are completely missing, which argos doesn't like
# waiting for fimex-fix
@@ -675,65 +677,56 @@ def snapNc_convert_to_grib(snapNc, basedir, ident, isotopes, bitmapCompress=Fals
basexmlFile = os.path.join(basedir, xmlFile)
ncmlFile = "config.ncml"
baseNcmlFile = os.path.join(basedir, ncmlFile)
- with open(baseNcmlFile, "w") as nh:
- nh.write(config["ncml"])
-
+ with open(baseNcmlFile, 'w') as nh:
+ nh.write(config['ncml'])
+
errlog = open(os.path.join(basedir, "fimex.errlog"), "w")
outlog = open(os.path.join(basedir, "fimex.outlog"), "w")
- tempfile = "tmp.grib"
+ tempfile = 'tmp.grib'
basetempfile = os.path.join(basedir, tempfile)
# fimex works in basedir, so it does not need the basefiles
- for appendix, params in config["extracts"].items():
- if appendix == "tofa":
+ for appendix, params in config['extracts'].items():
+ if appendix == 'tofa':
omitEmptyFields = True
else:
omitEmptyFields = False
- with open(basexmlFile, "w") as xh:
- xh.write(config["xml"].format(OMIT_EMPTY_FIELDS=omitEmptyFields))
+ with open(basexmlFile, 'w') as xh:
+ xh.write(config['xml'].format(OMIT_EMPTY_FIELDS=omitEmptyFields))
outFile = os.path.join(basedir, f"{ident}_{appendix}")
- with open(outFile, "wb") as gh:
+ with open(outFile, 'wb') as gh:
for param in params:
- if os.path.exists(basetempfile):
+ if (os.path.exists(basetempfile)):
os.remove(basetempfile)
- procOptions = [
- "fimex",
- f"--input.file={snapNc}",
- f"--input.config={ncmlFile}",
- # avoid problem with lat/lon variables
- # in fimex grib-writer< 0.64
- # '--extract.removeVariable=longitude',
- # '--extract.removeVariable=latitude',
- f"--output.file={tempfile}",
- "--output.type=grib",
- f"--output.config={xmlFile}",
- ]
- procOptions.append(f"--extract.selectVariables={param}")
+ procOptions = ['fimex', f'--input.file={snapNc}', f'--input.config={ncmlFile}',
+ # avoid problem with lat/lon variables
+ # in fimex grib-writer< 0.64
+ # '--extract.removeVariable=longitude',
+ # '--extract.removeVariable=latitude',
+ f'--output.file={tempfile}',
+ '--output.type=grib', f'--output.config={xmlFile}']
+ procOptions.append(f'--extract.selectVariables={param}')
print(" ".join(procOptions))
- proc = subprocess.Popen(
- procOptions, cwd=basedir, stderr=errlog, stdout=outlog
- )
- if proc.wait() != 0:
- errlog.write(
- "'{fimex}' in {dir} failed".format(
- fimex=" ".join(procOptions), dir=basedir
- )
- )
+ proc = subprocess.Popen(procOptions, cwd=basedir, stderr=errlog, stdout=outlog)
+ if (proc.wait() != 0):
+ errlog.write("'{fimex}' in {dir} failed".format(fimex=' '.join(procOptions), dir=basedir))
else:
# append tmp-file to final grib-file
- with (open(basetempfile, "rb")) as th:
+ with (open(basetempfile, 'rb')) as th:
while True:
- data = th.read(16 * 1024 * 1024) # read max 16M blocks
+ data = th.read(16*1024*1024) # read max 16M blocks
if data:
gh.write(data)
else:
break
- if os.path.exists(basetempfile):
+ if (os.path.exists(basetempfile)):
os.remove(basetempfile)
errlog.close()
outlog.close()
+
+
if __name__ == "__main__":
print(Resources().getStartScreen())
print(
@@ -761,7 +754,7 @@ def snapNc_convert_to_grib(snapNc, basedir, ident, isotopes, bitmapCompress=Fals
)
)
print(Resources().getDoseCoefficients())
- isotopes = ["Cs-137", "Cs134"]
+ isotopes = ['Cs-137', 'Cs134']
isoIds = Resources().isotopes2isoIds(isotopes)
print(f"f{isotopes} have ids: {isoIds}")
assert len(isotopes) == len(isoIds)
diff --git a/utils/SnapPy/Snappy/SnapController.py b/utils/SnapPy/Snappy/SnapController.py
index 6ecde0c3..acf840f4 100644
--- a/utils/SnapPy/Snappy/SnapController.py
+++ b/utils/SnapPy/Snappy/SnapController.py
@@ -23,7 +23,7 @@
from time import gmtime, strftime
import traceback
-from PyQt5 import QtWidgets
+from PyQt5 import QtWidgets, QtGui
from PyQt5.QtCore import (
QProcess,
QProcessEnvironment,
@@ -35,10 +35,7 @@
from Snappy.BrowserWidget import BrowserWidget
from Snappy.EcMeteorologyCalculator import EcMeteorologyCalculator
from Snappy.ICONMeteorologyCalculator import ICONMeteorologyCalculator
-from Snappy.MeteorologyCalculator import (
- MeteoDataNotAvailableException,
- MeteorologyCalculator,
-)
+from Snappy.MeteorologyCalculator import MeteoDataNotAvailableException, MeteorologyCalculator
from Snappy.MailImages import sendPngsFromDir
from Snappy.Resources import Resources, MetModel
from Snappy.SnapInputBomb import SnapInputBomb, ExplosionType
@@ -109,7 +106,9 @@ def start(self):
self.snap_controller.snapRunning = "running"
debug("started: " + self.snap_controller.snapRunning)
else:
- self.snap_controller.write_log("starting bsnap_naccident snap.input failed")
+ self.snap_controller.write_log(
+ "starting bsnap_naccident snap.input failed"
+ )
class SnapController:
@@ -174,7 +173,7 @@ def _met_finished_run_snap(self):
self.res.getSnapInputMetDefinitions(
self.lastQDict["metmodel"],
self.metcalc.get_meteorology_files(),
- **metdefs,
+ **metdefs
)
)
self._snap_model_run()
@@ -378,10 +377,8 @@ def run_snap_query(self, qDict):
except ValueError as ve:
latf = 0.0
lonf = 0.0
- errors += (
- "Cannot interprete latitude/longitude: {lat}/{lon}: {ex}\n".format(
- lat=lat, lon=lon, ex=ve
- )
+ errors += "Cannot interprete latitude/longitude: {lat}/{lon}: {ex}\n".format(
+ lat=lat, lon=lon, ex=ve
)
if len(errors) > 0:
@@ -467,24 +464,14 @@ def run_snap_query(self, qDict):
elif qDict["metmodel"] == MetModel.EC0p1Global:
try:
globalRes = EcMeteorologyCalculator.getGlobalMeteoResources()
- files = [
- x[1]
- for x in sorted(
- MeteorologyCalculator.findAllGlobalData(globalRes),
- key=lambda x: x[0],
- )
- ]
+ files = [x[1] for x in sorted(MeteorologyCalculator.findAllGlobalData(globalRes), key=lambda x: x[0])]
lat0 = MeteorologyCalculator.getLat0(latf, globalRes.domainHeight)
lon0 = MeteorologyCalculator.getLon0(lonf, globalRes.domainWidth)
with open(os.path.join(self.lastOutputDir, "snap.input"), "a") as fh:
- fh.write("FIELD.TYPE=fimex\n")
- fh.write("FIMEX.FILE_TYPE=netcdf\n")
- fh.write(
- f"FIMEX.INTERPOLATION=nearest|+proj=latlon +R=6371000 +no_defs|{lon0},{lon0+0.2},...,{lon0+globalRes.domainWidth}|{lat0},{lat0+0.2},...,{lat0+globalRes.domainHeight}|degree\n"
- )
- fh.write(
- self.res.getSnapInputMetDefinitions(qDict["metmodel"], files)
- )
+ fh.write(f"FIELD.TYPE=fimex\n")
+ fh.write(f"FIMEX.FILE_TYPE=netcdf\n")
+ fh.write(f"FIMEX.INTERPOLATION=nearest|+proj=latlon +R=6371000 +no_defs|{lon0},{lon0+0.2},...,{lon0+globalRes.domainWidth}|{lat0},{lat0+0.2},...,{lat0+globalRes.domainHeight}|degree\n")
+ fh.write(self.res.getSnapInputMetDefinitions(qDict["metmodel"], files))
self._snap_model_run()
except MeteoDataNotAvailableException as e:
self.write_log("problems finding global EC-met: {}".format(e.args[0]))
diff --git a/utils/SnapPy/Snappy/SnapControllerInverse.py b/utils/SnapPy/Snappy/SnapControllerInverse.py
index 356af9bf..d1a7465b 100644
--- a/utils/SnapPy/Snappy/SnapControllerInverse.py
+++ b/utils/SnapPy/Snappy/SnapControllerInverse.py
@@ -1,17 +1,17 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
@@ -23,33 +23,28 @@
import sys
from time import gmtime, strftime
-from PyQt5 import QtWidgets
-from PyQt5.QtCore import (
- QThread,
- QThreadPool,
-)
+from PyQt5 import QtWidgets, QtGui
+from PyQt5.QtCore import QProcess, QProcessEnvironment, QThread, QIODevice, QThreadPool, pyqtSignal
from Snappy.BrowserWidget import BrowserWidget
from Snappy.Resources import Resources
from Snappy.SnapController import SnapRun, SnapUpdateThread
import Snappy.Utils
-
def debug(*objs):
print("DEBUG: ", *objs, file=sys.stderr)
-
class SnapRunInverse(SnapRun):
+
def start(self, snapscript):
- debug("outputdir: " + self.snap_controller.lastOutputDir)
- # self.proc.start('/home/heikok/sleepLong.sh', ['snap.input'])
+ debug("outputdir: "+self.snap_controller.lastOutputDir)
+# self.proc.start('/home/heikok/sleepLong.sh', ['snap.input'])
self.proc.start(snapscript)
- if self.proc.waitForStarted(3000):
+ if (self.proc.waitForStarted(3000)) :
self.snap_controller.snapRunning = "running"
- debug("started " + snapscript + " " + self.snap_controller.snapRunning)
+ debug("started " + snapscript + " "+ self.snap_controller.snapRunning)
else:
self.snap_controller.write_log("starting {} failed".format(snapscript))
-
class Measurement:
def __init__(self, id, name, lon, lat, start, end):
self.id = id
@@ -58,52 +53,37 @@ def __init__(self, id, name, lon, lat, start, end):
self.lat = lat
self.start = start
self.end = end
-
+
class SnapControllerInverse:
def __init__(self):
self.res = Resources()
- self.lastOutputDir = os.path.join(
- self.res.getSnapOutputDir(),
- "{0}_{1}".format("backtrack", strftime("%Y-%m-%dT%H%M%S", gmtime())),
- )
+ self.lastOutputDir = os.path.join(self.res.getSnapOutputDir(), "{0}_{1}".format("backtrack", strftime("%Y-%m-%dT%H%M%S", gmtime())))
self.main = BrowserWidget()
- self.main.set_html(
- self.res.getStartScreenInverse().replace("OUTPUTDIR", self.lastOutputDir)
- )
+ self.main.set_html(self.res.getStartScreenInverse().replace("OUTPUTDIR", self.lastOutputDir))
self.main.set_form_handler(self._create_snap_form_handler())
self.main.show()
self.snapRunning = "inactive"
self.lastQDict = {}
- def write_log(self, txt: str):
+ def write_log(self, txt:str):
debug(txt)
- self.main.evaluate_javaScript("updateSnapLog({0});".format(json.dumps(txt)))
+ self.main.evaluate_javaScript('updateSnapLog({0});'.format(json.dumps(txt)))
def _snap_finished(self):
debug("finished")
self.snapRunning = "finished"
- # self.plot_results()
- with open(os.path.join(self.lastOutputDir, "snap.log.out"), "a") as logFile:
- logFile.write(
- "All work finished. Please open 'vgl-launch diana -s {dir}/diana.setup' to see results.\n".format(
- dir=self.lastOutputDir
- )
- )
+ #self.plot_results()
+ with open(os.path.join(self.lastOutputDir,"snap.log.out"), "a") as logFile:
+ logFile.write("All work finished. Please open 'vgl-launch diana -s {dir}/diana.setup' to see results.\n".format(dir=self.lastOutputDir))
self.update_log()
def _defaultDomainCheck(self, lonf, latf):
- if (
- latf <= self.res.ecDefaultDomainStartY
- or latf >= (self.res.ecDefaultDomainStartY + self.res.ecDomainHeight)
- or lonf <= self.res.ecDefaultDomainStartX
- or lonf >= self.res.ecDefaultDomainStartX + self.res.ecDomainWidth
- ):
- self.write_log(
- "(lat,lon) = ({lat},{lon}) outside domain.\nTry global EC meteorology under advanced.".format(
- lat=latf, lon=lonf
- )
- )
+ if (latf <= self.res.ecDefaultDomainStartY or
+ latf >= (self.res.ecDefaultDomainStartY + self.res.ecDomainHeight) or
+ lonf <= self.res.ecDefaultDomainStartX or
+ lonf >= self.res.ecDefaultDomainStartX + self.res.ecDomainWidth):
+ self.write_log("(lat,lon) = ({lat},{lon}) outside domain.\nTry global EC meteorology under advanced.".format(lat=latf, lon=lonf))
return False
return True
@@ -122,89 +102,66 @@ def run_snap_query(self, qDict):
errors = ""
self.measurements = []
# measurementX, latX, lonX, startX, encX
- for i in range(1, 100):
- if "measurement{}".format(i) not in qDict:
+ for i in range(1,100):
+ if 'measurement{}'.format(i) not in qDict:
continue
- name = qDict["measurement{}".format(i)]
- for tag in ("lat{}", "lon{}", "start{}", "end{}"):
+ name = qDict['measurement{}'.format(i)]
+ for tag in ('lat{}', 'lon{}', 'start{}', 'end{}'):
if tag.format(i) not in qDict:
- errors += "No tag " + tag.format(i) + " for " + name + "\n"
+ errors += "No tag " + tag.format(i) + " for " + name + "\n"
if len(errors) > 0:
continue
- match = re.search(
- r"(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2})", qDict["start{}".format(i)]
- )
+ match = re.search(r'(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2})', qDict['start{}'.format(i)])
if match:
- startDT = datetime.datetime(
- *tuple(map(int, list(match.group(1, 2, 3, 4))))
- )
+ startDT = datetime.datetime(*tuple(map(int, list(match.group(1,2,3,4)))))
else:
- errors += "Cannot interprete startTime: {0}\n".format(
- qDict["start{}".format(i)]
- )
- match = re.search(
- r"(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2}):(\d{1,2})",
- qDict["end{}".format(i)],
- )
+ errors += "Cannot interprete startTime: {0}\n".format(qDict['start{}'.format(i)])
+ match = re.search(r'(\d{4})-(\d{2})-(\d{2})[\+\s]+(\d{1,2}):(\d{1,2})', qDict['end{}'.format(i)])
if match:
- endDT = datetime.datetime(
- *tuple(map(int, list(match.group(1, 2, 3, 4))))
- )
+ endDT = datetime.datetime(*tuple(map(int, list(match.group(1,2,3,4)))))
if int(match.group(5)) > 0:
endDT = endDT + datetime.timedelta(hours=1)
else:
- errors += "Cannot interprete endTime: {0}\n".format(
- qDict["end{}".format(i)]
- )
+ errors += "Cannot interprete endTime: {0}\n".format(qDict['end{}'.format(i)])
if startDT >= endDT:
errors += "Start must be before end for {}\n".format(name)
- lat = qDict["lat{}".format(i)]
- lon = qDict["lon{}".format(i)]
+ lat = qDict['lat{}'.format(i)]
+ lon = qDict['lon{}'.format(i)]
try:
latf = Snappy.Utils.parseLat(lat)
lonf = Snappy.Utils.parseLon(lon)
except ValueError as ve:
- latf = 0.0
- lonf = 0.0
- errors += (
- "Cannot interprete latitude/longitude: {lat}/{lon}: {ex}\n".format(
- lat=lat, lon=lon, ex=ve
- )
- )
+ latf = 0.
+ lonf = 0.
+ errors += "Cannot interprete latitude/longitude: {lat}/{lon}: {ex}\n".format(lat=lat,lon=lon,ex=ve);
if len(errors) == 0:
- self.measurements.append(
- Measurement(i, name, lonf, latf, startDT, endDT)
- )
+ self.measurements.append(Measurement(i,name,lonf, latf, startDT, endDT))
+
debug("output directory: {}".format(self.lastOutputDir))
if not os.path.isdir(self.lastOutputDir):
try:
os.mkdir(self.lastOutputDir)
- except Exception:
+ except:
errors += "cannot create directory: {}".format(self.lastOutputDir)
else:
- errors += "cowardly refusing to write into existing directory: {}".format(
- self.lastOutputDir
- )
+ errors += "cowardly refusing to write into existing directory: {}".format(self.lastOutputDir)
- if len(errors) > 0:
- debug('updateSnapLog("{0}");'.format(json.dumps("ERRORS:\n\n" + errors)))
+ if (len(errors) > 0):
+ debug('updateSnapLog("{0}");'.format(json.dumps("ERRORS:\n\n"+errors)))
self.write_log("ERRORS:\n\n{0}".format(errors))
return
curtime = gmtime()
self.lastQDict = qDict
- self.write_log(
- "working with {number} measurements in {dir}".format(
- number=len(self.measurements), dir=self.lastOutputDir
- )
- )
+ self.write_log("working with {number} measurements in {dir}".format(number=len(self.measurements), dir=self.lastOutputDir))
+
# write snap.input files
for mes in self.measurements:
- print("{id} {name}".format(id=mes.id, name=mes.name))
+ print("{id} {name}".format(id=mes.id,name=mes.name))
releaseDT = mes.end - mes.start
- releaseH = releaseDT.days * 24 + math.ceil(releaseDT.seconds / 3600)
+ releaseH = releaseDT.days * 24 + math.ceil(releaseDT.seconds/3600)
sourceTerm = """
SIMULATION.START.DATE={simStart}
SET_RELEASE.POS= P= {lat}, {lon}
@@ -219,73 +176,50 @@ def run_snap_query(self, qDict):
RELEASE.UPPER.M= {upperHeight}, {upperHeight}
RELEASE.BQ/SEC.COMP= 1e12, 1e12, 'Cs137'
"""
- self.lastSourceTerm = sourceTerm.format(
- simStart=strftime("%Y-%m-%d_%H:%M:%S", curtime),
- lat=mes.lat,
- lon=mes.lon,
- startTime=mes.end.strftime("%Y %m %d %H"),
- runTime=runTime,
- releaseTime=releaseH,
- radius=500,
- lowerHeight=0,
- upperHeight=250,
- )
-
- with open(
- os.path.join(self.lastOutputDir, "snap.input{}".format(mes.id)), "w"
- ) as fh:
+ self.lastSourceTerm = sourceTerm.format(simStart=strftime("%Y-%m-%d_%H:%M:%S",curtime),
+ lat=mes.lat, lon=mes.lon, startTime=mes.end.strftime("%Y %m %d %H"),
+ runTime=runTime,
+ releaseTime=releaseH,
+ radius=500,
+ lowerHeight=0, upperHeight=250)
+
+
+ with open(os.path.join(self.lastOutputDir, "snap.input{}".format(mes.id)),'w') as fh:
fh.write(self.lastSourceTerm)
# add Cs137 definition
fh.write(self.res.isotopes2snapinput([169]))
- metmodel = "nrpa_ec_0p1"
- if metmodel == "nrpa_ec_0p1":
- if "metpattern" in qDict:
- files = self.res.getECMeteorologyFiles(
- startDT, runTime, pattern=qDict["metpattern"]
- )
- if len(files) == 0:
- self.write_log(
- "no EC met-files found for {}, runtime {} with pattern {}".format(
- startDT, runTime, qDict["metpattern"]
- )
- )
+ metmodel = 'nrpa_ec_0p1'
+ if (metmodel == 'nrpa_ec_0p1'):
+ if ('metpattern' in qDict):
+ files = self.res.getECMeteorologyFiles(startDT, runTime, pattern=qDict['metpattern'])
+ if (len(files) == 0):
+ self.write_log("no EC met-files found for {}, runtime {} with pattern {}".format(startDT, runTime, qDict['metpattern']))
return
else:
files = self.res.getECMeteorologyFiles(startDT, runTime)
- if len(files) == 0:
- self.write_log(
- "no EC met-files found for {}, runtime {}".format(
- startDT, runTime
- )
- )
+ if (len(files) == 0):
+ self.write_log("no EC met-files found for {}, runtime {}".format(startDT, runTime))
return
- if not self._defaultDomainCheck(lonf, latf):
- return
+ if (not self._defaultDomainCheck(lonf,latf)):
+ return
snapIn = self.res.getSnapInputMetDefinitions(metmodel, files)
- snapIn = snapIn.replace(
- "snap.", "snap{}.".format(mes.id)
- ) # replace snap.nc and snap.log to snap1.nc snap1.log
- with open(
- os.path.join(self.lastOutputDir, "snap.input{}".format(mes.id)), "a"
- ) as fh:
+ snapIn = snapIn.replace("snap.", "snap{}.".format(mes.id)) # replace snap.nc and snap.log to snap1.nc snap1.log
+ with open(os.path.join(self.lastOutputDir, "snap.input{}".format(mes.id)),'a') as fh:
fh.write(snapIn)
-
+
snapscript = os.path.join(self.lastOutputDir, "snap.sh")
- with open(snapscript, "a") as fh:
+ with open(snapscript,'a') as fh:
fh.write("#! /bin/bash\n")
fh.write("cd {}\n".format(self.lastOutputDir))
ids = " ".join([str(x.id) for x in self.measurements])
- fh.write(r"parallel -i -j 4 bsnap_naccident snap.input{} -- " + ids + "\n")
- joinIds = " ".join(["-i snap{}.nc".format(x.id) for x in self.measurements])
- fh.write(
- "snapCombineInverse -I Cs137 -o snapCombined.nc {}\n".format(joinIds)
- )
-
+ fh.write(r'parallel -i -j 4 bsnap_naccident snap.input{} -- ' +ids + "\n")
+ joinIds = " ".join(["-i snap{}.nc".format(x.id) for x in self.measurements])
+ fh.write("snapCombineInverse -I Cs137 -o snapCombined.nc {}\n".format(joinIds))
+
# create diana.setup
- with open(os.path.join(self.lastOutputDir, "diana.setup"), "w") as fh:
- fh.write(
- """
+ with open(os.path.join(self.lastOutputDir, "diana.setup"), 'w') as fh:
+ fh.write('''
%include /etc/diana/setup/diana.setup-COMMON
field=source_probability
@@ -308,16 +242,9 @@ def run_snap_query(self, qDict):
filegroup=snapBacktrack
m=combined t=fimex format=netcdf f={dir}/snapCombined.nc
- """.format(
- dir=self.lastOutputDir
- )
- )
+ '''.format(dir=self.lastOutputDir))
for m in self.measurements:
- fh.write(
- "m={name} t=fimex format=netcdf f={dir}/snap{id}.nc\n".format(
- name=m.name, dir=self.lastOutputDir, id=m.id
- )
- )
+ fh.write("m={name} t=fimex format=netcdf f={dir}/snap{id}.nc\n".format(name=m.name, dir=self.lastOutputDir, id=m.id))
fh.write("\n")
os.chmod(snapscript, 0o755)
self._snap_model_run(snapscript)
@@ -331,13 +258,14 @@ def _snap_model_run(self, snapscript):
self.snap_update.update_log_signal.connect(self.update_log)
self.snap_update.start(QThread.LowPriority)
+
def update_log_query(self, qDict):
- # MainBrowserWindow._default_form_handler(qDict)
+ #MainBrowserWindow._default_form_handler(qDict)
self.write_log("updating...")
- if os.path.isfile(os.path.join(self.lastOutputDir, "snap.log.out")):
- lfh = open(os.path.join(self.lastOutputDir, "snap.log.out"))
- debug(tail(os.path.join(self.lastOutputDir, "snap.log.out"), 30))
- self.write_log(tail(os.path.join(self.lastOutputDir, "snap.log.out"), 30))
+ if os.path.isfile(os.path.join(self.lastOutputDir,"snap.log.out")) :
+ lfh = open(os.path.join(self.lastOutputDir,"snap.log.out"))
+ debug(tail(os.path.join(self.lastOutputDir,"snap.log.out"),30))
+ self.write_log(tail(os.path.join(self.lastOutputDir,"snap.log.out"), 30))
lfh.close()
def update_log(self):
@@ -346,45 +274,43 @@ def update_log(self):
def _create_snap_form_handler(self):
def handler(queryDict):
"""a form-handler with closure for self"""
- options = {"Run": self.run_snap_query, "Update": self.update_log_query}
+ options = { 'Run' : self.run_snap_query,
+ 'Update' : self.update_log_query
+ }
# mapping from QList to simple dictionary
qDict = dict()
for key, value in queryDict:
qDict[key] = value
# calling the correct handler depending on the module
try:
- options[qDict["action"]](qDict)
+ options[qDict['action']](qDict)
except TypeError as ex:
self.write_log("type-error: {}".format(ex))
except ValueError as ex:
self.write_log("value-error: {}".format(ex))
- except Exception:
- self.write_log(
- "Unexpected error on {0}: {1}".format(
- qDict["action"], sys.exc_info()[0]
- )
- )
+ except:
+ self.write_log("Unexpected error on {0}: {1}".format(qDict['action'],sys.exc_info()[0]))
raise
-
return handler
+
def tail(f, n):
fh = open(f)
lines = []
while 1:
line = fh.readline()
- if line:
+ if (line):
lines.append(line)
if len(lines) > n:
- lines = lines[len(lines) - n :]
+ lines = lines[len(lines)-n:]
else:
break
return "".join(lines)
-
if __name__ == "__main__":
debug("threads: {}".format(QThreadPool.globalInstance().maxThreadCount()))
app = QtWidgets.QApplication(sys.argv)
ctr = SnapControllerInverse()
sys.exit(app.exec_())
+
diff --git a/utils/SnapPy/Snappy/SnapInputBomb.py b/utils/SnapPy/Snappy/SnapInputBomb.py
index 9b228f37..68e83c45 100644
--- a/utils/SnapPy/Snappy/SnapInputBomb.py
+++ b/utils/SnapPy/Snappy/SnapInputBomb.py
@@ -5,12 +5,9 @@
import io
import typing
-_ExplosionType = namedtuple(
- "ExplosionType", ["name", "radius_sizes", "size_distribution"]
-)
+_ExplosionType = namedtuple('ExplosionType', ['name', 'radius_sizes', "size_distribution"])
-
-class ExplosionType(Enum):
+class ExplosionType(Enum):
@property
def radius_sizes(self):
return self.value.radius_sizes
@@ -20,49 +17,46 @@ def size_distribution(self):
return self.value.size_distribution
# default snap
- MIXED = _ExplosionType(
- "Mixed",
- [2.2, 4.4, 8.6, 14.6, 22.8, 36.1, 56.5, 92.3, 173.2, 250.0],
- [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
- )
+ MIXED = _ExplosionType("Mixed",
+ [2.2, 4.4, 8.6, 14.6, 22.8, 36.1, 56.5, 92.3, 173.2, 250.0],
+ [ .1, .1, .1, .1, .1, .1, .1, .1, .1, .1]
+ )
# Glasstone Dolan, lognormal ~(3.78, 0.68)
- SURFACE = _ExplosionType(
- "Surface",
- [8.6, 14.6, 22.8, 36.1, 56.5, 92.3, 173.2, 250.0],
- [0.02, 0.08, 0.17, 0.25, 0.24, 0.17, 0.06, 0.01],
- )
+ SURFACE = _ExplosionType("Surface",
+ [ 8.6, 14.6, 22.8, 36.1, 56.5, 92.3, 173.2, 250.0],
+ [ .02, .08, .17, .25, .24, .17, .06, .01]
+ )
# Glassstone Dolan, uniform below 20µm
- HIGH_ALTITUDE = _ExplosionType(
- "High Altitude", [2.2, 4.4, 8.6, 14.6], [0.25, 0.25, 0.25, 0.25]
- )
+ HIGH_ALTITUDE = _ExplosionType("High Altitude",
+ [2.2, 4.4, 8.6, 14.6],
+ [.25, .25, .25, .25]
+ )
@classmethod
def by_argosname(cls, name: str):
"""get a ExplosionType by name used in Argos. Unknown names will be translated to MIXED"""
name = name.lower().strip()
- if name == "surface":
+ if name == 'surface':
return ExplosionType.SURFACE
- elif name == "1000 meters" or name == "high altitude":
+ elif name == '1000 meters' or name == 'high altitude':
# '1000 meters' was previous name
return ExplosionType.HIGH_ALTITUDE
else:
return ExplosionType.MIXED
-def _lin_interpol(a0, a, b, x, y):
- """linear interpolation of x=f(a), y=f(b) to f(a0)"""
- if a == b:
+def _lin_interpol(a0,a,b, x,y):
+ '''linear interpolation of x=f(a), y=f(b) to f(a0)'''
+ if (a == b):
return x
else:
- return x + (a0 - a) * (y - x) / (b - a)
-
+ return x + (a0-a)*(y-x)/(b-a)
-class YieldParameters:
+class YieldParameters():
"""
class translating yield parameters depending on ExplosionType
"""
-
- _cloud_defs1 = """yield bottom top thickness
+ _cloud_defs1 = '''yield bottom top thickness
0 0 0 0
0.5 1500 2250 750
1 3000 4000 1000
@@ -79,18 +73,14 @@ class translating yield parameters depending on ExplosionType
20 6500 9500 3000
25 6500 9500 3000
30 6500 9500 3000
-"""
+'''
- def __init__(
- self,
- nuclear_yield: float = 15,
- explosion_type: ExplosionType = ExplosionType.MIXED,
- ) -> None:
+ def __init__(self, nuclear_yield: float = 15, explosion_type: ExplosionType = ExplosionType.MIXED) -> None:
self._nuclear_yield = nuclear_yield
self._explosion_type = explosion_type
self._cloud_defs = self._parse_clouds(io.StringIO(self._cloud_defs1))
return
-
+
@property
def nuclear_yield(self):
return self._nuclear_yield
@@ -99,9 +89,14 @@ def nuclear_yield(self):
def explosion_type(self):
return self._explosion_type
+
def _parse_clouds(self, cloud_def: typing.TextIO) -> dict:
- """ensure that input-file is ordered by yield"""
- retval = {"yield": [], "bottom": [], "top": []}
+ '''ensure that input-file is ordered by yield'''
+ retval = {
+ 'yield': [],
+ 'bottom': [],
+ 'top': []
+ }
reader = csv.DictReader(cloud_def, delimiter="\t")
for row in reader:
for tag in retval.keys():
@@ -109,11 +104,11 @@ def _parse_clouds(self, cloud_def: typing.TextIO) -> dict:
for tag in retval.keys():
retval[tag] = numpy.asarray(retval[tag])
return retval
-
+
def _get_linear_cloud_def(self, tag) -> dict:
- """get a dict of cloud bottom, top, and radius and stemradius depending on yield"""
- cloudyield = self._cloud_defs["yield"]
- pos = numpy.argmin(numpy.abs(cloudyield - self._nuclear_yield))
+ '''get a dict of cloud bottom, top, and radius and stemradius depending on yield'''
+ cloudyield = self._cloud_defs['yield']
+ pos = numpy.argmin(numpy.abs(cloudyield-self._nuclear_yield))
# linear interpolation
if cloudyield[pos] > self._nuclear_yield:
pos1 = pos - 1
@@ -123,13 +118,9 @@ def _get_linear_cloud_def(self, tag) -> dict:
pos1 = pos + 1
if pos1 >= cloudyield.shape[0]:
pos1 = pos
- return _lin_interpol(
- self._nuclear_yield,
- cloudyield[pos],
- cloudyield[pos1],
- self._cloud_defs[tag][pos],
- self._cloud_defs[tag][pos1],
- )
+ return _lin_interpol(self._nuclear_yield, cloudyield[pos], cloudyield[pos1],
+ self._cloud_defs[tag][pos], self._cloud_defs[tag][pos1])
+
def activity_after_1min(self) -> float:
"""
@@ -137,39 +128,34 @@ def activity_after_1min(self) -> float:
"""
# formula from 2^19Bq/kT(TNT) as of
# Fission Products from Nuclear Weapons Explosions (Tovedal)
- # https://inis.iaea.org/collection/NCLCollectionStore/_Public/32/055/32055989.pdf
+ # https://inis.iaea.org/collection/NCLCollectionStore/_Public/32/055/32055989.pdf
return self._nuclear_yield * 2e19
def cloud_bottom(self):
- """cloud bottom in m"""
- return self._get_linear_cloud_def("bottom")
-
+ '''cloud bottom in m'''
+ return self._get_linear_cloud_def('bottom')
def cloud_top(self):
- """cloud top in m"""
- return self._get_linear_cloud_def("top")
-
+ '''cloud top in m'''
+ return self._get_linear_cloud_def('top')
def cloud_radius(self):
- """cloud radius in m"""
- # Typical radius of mushroom cloud after ~ 10 - 15 min is 1-3 km seen from different studies (Kanarska et al., 2009, Arthur et al., 2021)
- return 2500.0
-
+ '''cloud radius in m'''
+ # Typical radius of mushroom cloud after ~ 10 - 15 min is 1-3 km seen from different studies (Kanarska et al., 2009, Arthur et al., 2021)
+ return 2500.
def stem_radius(self):
- """cloud radius in m"""
- # Typical radius of mushroom cloud after ~ 10 - 15 min is 1-3 km seen from different studies (Kanarska et al., 2009, Arthur et al., 2021)
- return 0.0
+ '''cloud radius in m'''
+ # Typical radius of mushroom cloud after ~ 10 - 15 min is 1-3 km seen from different studies (Kanarska et al., 2009, Arthur et al., 2021)
+ return 0.
+
-class SnapInputBomb:
- """
+
+
+class SnapInputBomb():
+ '''
Description of the bomb-part of a snap.input file
Excluding meteorology and start-position
- """
-
- def __init__(
- self,
- nuclear_yield: float = 15,
- explosion_type: ExplosionType = ExplosionType.MIXED,
- ) -> None:
+ '''
+ def __init__(self, nuclear_yield: float = 15, explosion_type: ExplosionType = ExplosionType.MIXED) -> None:
"""
Parameters
----------
@@ -182,27 +168,25 @@ def __init__(
# _yield_parameters
# _radius_sizes
# _size_distribution
- self._component_basename = "Aerosol"
+ self._component_basename = 'Aerosol'
self._component_formatter = "{component}_{size:.1f}mym"
- self._gravity = [] # might be empty array or fixed gravity per size
- self._default_density = 2.95 # general rock/sand density
- self._densities = [] # might be empty array of densities per size
- self._minutes = 0.0 # explosion starts minutes after full hour
+ self._gravity = [] # might be empty array or fixed gravity per size
+ self._default_density = 2.95 # general rock/sand density
+ self._densities = [] # might be empty array of densities per size
+ self._minutes = 0. # explosion starts minutes after full hour
return
-
+
@property
def nuclear_yield(self) -> float:
"""nuclear yield in Mg(=kTonnes) TNT"""
return self._yield_parameters.nuclear_yield
-
+
@property
def explosion_type(self) -> ExplosionType:
return self._yield_parameters.explosion_type
- def set_bomb(
- self, nuclear_yield: float, explosion_type: ExplosionType = None
- ) -> None:
+ def set_bomb(self, nuclear_yield: float, explosion_type: ExplosionType = None) -> None:
"""
set yield, sizes, size_distribution and activity base on nuclear_yield and explosion_type
"""
@@ -223,7 +207,7 @@ def activity_after_1min(self) -> float:
def cloud_bottom(self) -> float:
"""cloud bottom height in m"""
return self._yield_parameters.cloud_bottom()
-
+
@property
def cloud_top(self) -> float:
"""cloud top height in m"""
@@ -238,7 +222,7 @@ def cloud_radius(self) -> float:
def stem_radius(self) -> float:
"""cloud top height in m"""
return self._yield_parameters.stem_radius()
-
+
@property
def total_activity(self) -> float:
"""total activity of cloud and stem in Bq"""
@@ -248,18 +232,17 @@ def total_activity(self) -> float:
def component_basename(self) -> str:
"""name of the component used in snap"""
return self._component_basename
-
+
def component_name(self, pos: int) -> str:
"""name of component of size[pos] in SNAP"""
- return self._component_formatter.format(
- component=self.component_basename, size=self.radius_sizes[pos]
- )
+ return self._component_formatter.format(component=self.component_basename,
+ size=self.radius_sizes[pos])
@property
def minutes(self) -> float:
"""offset in minutes the explosion start after start of SNAP run (starting at full hour)"""
return self._minutes
-
+
@minutes.setter
def minutes(self, minutes: float) -> None:
self._minutes = minutes
@@ -269,7 +252,7 @@ def minutes(self, minutes: float) -> None:
def radius_sizes(self) -> list[float]:
"""retrieve a list of radius sizes in µm for different size-classes"""
return self._radius_sizes
-
+
@radius_sizes.setter
def radius_sizes(self, radius_sizes: list[float]) -> None:
"""
@@ -279,7 +262,7 @@ def radius_sizes(self, radius_sizes: list[float]) -> None:
self._radius_sizes = radius_sizes
self._size_distribution = []
return
-
+
@property
def size_distribution(self) -> list[float]:
"""
@@ -288,55 +271,55 @@ def size_distribution(self) -> list[float]:
"""
sum_dist = 0
size_dist = []
- l_min = min(len(self.radius_sizes), len(self._size_distribution))
- for i in range(l_min):
+ l = min(len(self.radius_sizes), len(self._size_distribution))
+ for i in range(l):
nextsum = sum_dist + self._size_distribution[i]
if nextsum <= 1:
size_dist.append(self._size_distribution[i])
sum_dist = nextsum
else:
- size_dist.append(1 - sum_dist)
+ size_dist.append(1-sum_dist)
sum_dist = 1
# append equal size_distribution for the remaining parts
extras = len(self._radius_sizes) - len(self._size_distribution)
if extras > 0:
- frac = (1 - sum_dist) / extras
+ frac = (1-sum_dist)/extras
for i in range(extras):
- size_dist.append(frac)
+ size_dist.append(frac)
# assertion
sum_size_dist = sum(size_dist)
- if abs(1 - sum_size_dist) > 0.01:
+ if abs(1-sum_size_dist) > 0.01:
raise Exception(f"sum of size_dist == {sum_size_dist} != 1: {size_dist}")
self._size_distribution = size_dist
return self._size_distribution
@size_distribution.setter
def size_distribution(self, size_distribution: list[float]) -> None:
- if sum(size_distribution) > 1:
+ if (sum(size_distribution) > 1):
raise Exception(f"size_distribution > 1: {size_distribution}")
self._size_distribution = size_distribution
return
@property
def default_density(self) -> float:
- """density in g/cm³ used for all particle classes unless specified otherwise"""
+ '''density in g/cm³ used for all particle classes unless specified otherwise'''
return self._default_density
-
+
@default_density.setter
def default_density(self, default_density: float) -> None:
- """set the default density in g/cm^3"""
+ '''set the default density in g/cm^3'''
self._default_density = default_density
return
@property
def densities(self) -> list[float]:
- """list of densities in g/cm^3 for each size-class, uses default_density if none given"""
- """
+ ''' list of densities in g/cm^3 for each size-class, uses default_density if none given'''
+ '''
set the list of densities in g/cm^3 for each size-classes
a 0 or undefined density will give the default_density
a negative density will disable gravity
- """
+ '''
retlist = []
for i in range(len(self.radius_sizes)):
if i >= len(self._densities):
@@ -354,10 +337,11 @@ def densities(self, densities: list[float]):
self._densities = densities
return
+
def snap_input(self) -> str:
"""get the bomb-input as partial snap.input string"""
lines = []
- lines.append("MAX.PARTICLES.PER.RELEASE= 1000000\n")
+ lines.append('MAX.PARTICLES.PER.RELEASE= 1000000\n')
lines.append(f"** Explosive yield {self.nuclear_yield}ktonnes")
lines.append("TIME.RELEASE.PROFILE.BOMB")
@@ -379,17 +363,15 @@ def snap_input(self) -> str:
relupper.append(self.cloud_top)
relstem.append(self.stem_radius)
- lines.append(
- f"""
+ lines.append(f"""
RELEASE.MINUTE= {",".join(map(str, relmins))}
RELEASE.RADIUS.M= {",".join(map(str, relradius))}
RELEASE.LOWER.M= {",".join(map(str, rellower))}
RELEASE.UPPER.M= {",".join(map(str, relupper))}
RELEASE.MUSHROOM.STEM.RADIUS.M= {",".join(map(str, relstem))}
- """
- )
+ """)
- lines.append("* PARTICLE CLASSES")
+ lines.append('* PARTICLE CLASSES')
pclass_tmpl = """COMPONENT= {classname}
DRY.DEP.ON
WET.DEP.ON
@@ -402,54 +384,48 @@ def snap_input(self) -> str:
densities = self.densities
for i, radius in enumerate(self.radius_sizes):
dens = densities[i]
- gravity = ""
+ gravity = ''
if dens < 0:
- gravity = "GRAVITY.OFF"
+ gravity = 'GRAVITY.OFF'
else:
- gravity = "*GRAVITY.OFF"
- lines.append(
- pclass_tmpl.format(
- radius=radius,
- density=dens,
- classname=self.component_name(i),
- gravity=gravity,
- identification=i + 1,
- )
- )
-
+ gravity = '*GRAVITY.OFF'
+ lines.append(pclass_tmpl.format(radius=radius,
+ density=dens,
+ classname=self.component_name(i),
+ gravity=gravity,
+ identification=i+1))
+
for i, frac in enumerate(self.size_distribution):
size_activity = activity + [f"{self.activity_after_1min*frac:.3E}"]
- lines.append(
- f"RELEASE.BQ/STEP.COMP= {','.join(size_activity)} '{self.component_name(i)}'"
- )
+ lines.append(f"RELEASE.BQ/STEP.COMP= {','.join(size_activity)} '{self.component_name(i)}'")
- return "\n".join(lines) + "\n"
+ return "\n".join(lines) + "\n"
if __name__ == "__main__":
# unit test
nyield = 15
- yp = YieldParameters(0.25, ExplosionType.MIXED)
- assert abs(yp.cloud_bottom() - 750) < 0.1
- assert abs(yp.cloud_top() - 1125) < 0.1
- assert abs(yp.cloud_radius() - 2500) < 0.1
- assert abs(yp.stem_radius() - 0) < 0.1
+ yp = YieldParameters(.25, ExplosionType.MIXED)
+ assert(abs(yp.cloud_bottom()-750) < .1)
+ assert(abs(yp.cloud_top()-1125) < .1)
+ assert(abs(yp.cloud_radius()-2500) < .1)
+ assert(abs(yp.stem_radius()-0) < .1)
sib = SnapInputBomb(nyield)
- assert nyield == sib.nuclear_yield
- assert sib.radius_sizes[0] == 2.2
+ assert(nyield == sib.nuclear_yield)
+ assert(sib.radius_sizes[0] == 2.2)
print(sib.component_name(0))
- assert sib.component_name(0) == "Aerosol_2.2mym"
- assert abs(1 - sum(sib.size_distribution)) <= 0.01
- sib.radius_sizes = [1, 2, 3, 4]
- assert abs(1 - sum(sib.size_distribution)) <= 0.01
+ assert(sib.component_name(0) == 'Aerosol_2.2mym')
+ assert(abs(1-sum(sib.size_distribution)) <= .01)
+ sib.radius_sizes = [1,2,3,4]
+ assert(abs(1-sum(sib.size_distribution)) <= .01)
try:
- sib.size_distribution = [0.3, 0.4, 0.5, 0.6]
- assert False
- except Exception:
+ sib.size_distribution = [.3,.4,.5,.6]
+ assert(False)
+ except:
pass
- sib.size_distribution = [0.3, 0.4, 0.2]
- assert abs(sib.size_distribution[3] - 0.1) <= 0.01
+ sib.size_distribution = [.3,.4,.2]
+ assert(abs(sib.size_distribution[3]-0.1) <= 0.01)
sib.minutes = 30
print(sib.snap_input())
- # print(SnapInputBomb(.25).snap_input())
+ #print(SnapInputBomb(.25).snap_input())
diff --git a/utils/SnapPy/Snappy/SnapJob.py b/utils/SnapPy/Snappy/SnapJob.py
index 9e1aa7a1..b4693cc4 100644
--- a/utils/SnapPy/Snappy/SnapJob.py
+++ b/utils/SnapPy/Snappy/SnapJob.py
@@ -24,18 +24,15 @@
import os
import re
-
class UnknownModelException(Exception):
- """Exception when wrong model/prefix-name is send to SnapJob"""
-
+ '''Exception when wrong model/prefix-name is send to SnapJob'''
pass
-
class SnapJob:
"""tasks to work with the model SNAP, SNAPGLOBAL and TRAJ with EC-data"""
def __init__(self, task, hpc):
- """construct a snap-job with a task (see snapRemoteRunner) and a hpc"""
+ """ construct a snap-job with a task (see snapRemoteRunner) and a hpc"""
self.task = task
self.hpc = hpc
@@ -58,13 +55,13 @@ def get_input_files(self):
raise UnknownModelException("unknown model:" + self.task.model)
def job_script(self):
- """return a sge job-script for the different models
+ """return a sge job-script for the different models
allow for SNAP, SNAPGLOBAL, SNAPNORDIC, SNAPICONGLOBAL
and SNAPBOMB, SNAPBOMBGLOBAL, SNAPBOMBNORDIC, SNAPBOMBICONGLOBAL
"""
- if self.task.model.startswith("SNAPBOMB"):
+ if self.task.model.startswith('SNAPBOMB'):
task_model = self.task.model[8:]
- elif self.task.model.startswith("SNAP"):
+ elif self.task.model.startswith('SNAP'):
task_model = self.task.model[4:]
else:
raise UnknownModelException("unknown model:" + self.task.model)
diff --git a/utils/SnapPy/Snappy/Utils.py b/utils/SnapPy/Snappy/Utils.py
index df2f1733..951c6230 100644
--- a/utils/SnapPy/Snappy/Utils.py
+++ b/utils/SnapPy/Snappy/Utils.py
@@ -1,25 +1,25 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""
+'''
Created on Oct 15, 2017
@author: heikok
-"""
+'''
import datetime
import numbers
@@ -30,164 +30,131 @@
def _parseLLNumber(llstr):
- """parse a latitude or longitude string to a decimal returning (decimal, character)
+ ''' parse a latitude or longitude string to a decimal returning (decimal, character)
where character can should be NSEW or empty.
Possible formats: -3.54, 3:5:3 S, 3° 5' 34"S
Raises a ValueError if the format doesn't match
- """
+ '''
if isinstance(llstr, numbers.Number):
- return (float(llstr), "")
+ return (float(llstr), '')
# remove + character coming from url-queries (space)
- llstr = llstr.replace("+", "")
+ llstr = llstr.replace('+','')
llstr = llstr.strip()
decimal = 0
-
+
# fetch and remove last character (NSEW)
- character = ""
- if re.search(r"[A-Za-z]$", llstr):
+ character = ''
+ if re.search(r'[A-Za-z]$', llstr):
character = llstr[-1].upper()
llstr = llstr[0:-1]
llstr = llstr.strip()
-
+
# just a number
- if re.search(r"^(-?\d+)$", llstr):
+ if re.search(r'^(-?\d+)$', llstr):
decimal = float(llstr)
- elif re.search(r"^(-?\d+\.\d+)$", llstr):
+ elif re.search(r'^(-?\d+\.\d+)$', llstr):
decimal = float(llstr)
else:
# degree:minutes(:second)
- m = re.search(r"^(-?\d+)\s*[:°]\s*(\d+)(\s*[\:\']\s*)?(\d+)?", llstr)
+ m = re.search(r'^(-?\d+)\s*[:°]\s*(\d+)(\s*[\:\']\s*)?(\d+)?', llstr)
if m:
- decimal = float(m.group(1)) + float(m.group(2)) / 60
+ decimal = float(m.group(1)) + float(m.group(2))/60
if m.group(4):
- decimal += float(m.group(4)) / 3600
+ decimal += float(m.group(4))/3600
else:
raise ValueError("unable to parse lon/lat number: '" + llstr + "'")
return (decimal, character.upper())
def parseLat(latStr):
- """parse a latitude string to decimal degrees, raise an exception on error
+ ''' parse a latitude string to decimal degrees, raise an exception on error
Possible formats: -3.54, 3:5:3 S, 3° 5' 34"S
- """
+ '''
try:
(decimal, northSouth) = _parseLLNumber(latStr)
except TypeError as te:
raise ValueError("cannot parse latitude: {}".format(te))
-
- if northSouth == "S":
+
+ if northSouth == 'S':
decimal *= -1
- elif northSouth == "" or northSouth == "N":
+ elif northSouth == "" or northSouth == 'N':
pass
else:
- raise ValueError("Not a latitude: " + latStr)
+ raise ValueError("Not a latitude: "+latStr)
if decimal < -90.0001 or decimal > 90.0001:
- raise ValueError("Not a latitude: " + latStr)
+ raise ValueError("Not a latitude: "+latStr)
return decimal
-
def parseLon(lonStr):
- """parse a longitude string to decimal degrees, raise an exception on error
+ ''' parse a longitude string to decimal degrees, raise an exception on error
Possible formats: -3.54, 3:5:3 W, 3° 5' 34"W
- """
+ '''
try:
(decimal, eastWest) = _parseLLNumber(lonStr)
except TypeError as te:
raise ValueError("cannot parse longitude: {}".format(te))
- if eastWest == "W":
+ if eastWest == 'W':
decimal *= -1
- elif eastWest == "" or eastWest == "E":
+ elif eastWest == "" or eastWest == 'E':
pass
else:
- raise ValueError("Not a longitude: " + lonStr)
+ raise ValueError("Not a longitude: "+lonStr)
if decimal < -180.0001 or decimal > 180.0001:
- raise ValueError("Not a longitude: " + lonStr)
+ raise ValueError("Not a longitude: "+lonStr)
return decimal
-
class IsLatLonTests(unittest.TestCase):
+
def testParseLat(self):
- self.assertAlmostEqual(
- parseLat(-3.54), -3.54, msg='parseLat("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("-3.54"), -3.54, msg='parseLat("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("3.54 S"), -3.54, msg='parseLat("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("3:5:3 S"), -3.0841, msg='parseLat("3:5:3 S")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("3 °5' 3\" S"), -3.0841, msg='parseLat("3 °5\' 3" S")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("60°5'5\"N"), 60.084722, msg='parseLat("60°5\'5"N")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLat("8°20′2+″S+"), -8.333, msg='parseLat("8°20′27″S ")', delta=1e-3
- )
+ self.assertAlmostEqual(parseLat(-3.54), -3.54, msg="parseLat(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("-3.54"), -3.54, msg="parseLat(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("3.54 S"), -3.54, msg="parseLat(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("3:5:3 S"), -3.0841, msg="parseLat(\"3:5:3 S\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("3 °5' 3\" S"), -3.0841, msg="parseLat(\"3 °5' 3\" S\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("60°5'5\"N"), 60.084722, msg="parseLat(\"60°5'5\"N\")", delta=1e-3)
+ self.assertAlmostEqual(parseLat("8°20′2+″S+"), -8.333, msg="parseLat(\"8°20′27″S \")", delta=1e-3)
self.assertRaises(ValueError, parseLat, "195")
-
+
def testParseLon(self):
- self.assertAlmostEqual(
- parseLon(-3.54), -3.54, msg='parseLon("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("-3.54"), -3.54, msg='parseLon("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("3.54 W"), -3.54, msg='parseLon("-3.54")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("3:5:3 W"), -3.0841, msg='parseLon("3:5:3 W")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("10:5:5W"), -10.084722, msg='parseLon("10:5:5W")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("10:5W"), -10.08333, msg='parseLon("10:5W")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("3 °5' 3\" W"), -3.0841, msg='parseLon("3 °5\' 3" W")', delta=1e-3
- )
- self.assertAlmostEqual(
- parseLon("10°4'W"), -10.06666, msg='parseLon("10°4\'W")', delta=1e-3
- )
+ self.assertAlmostEqual(parseLon(-3.54), -3.54, msg="parseLon(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("-3.54"), -3.54, msg="parseLon(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("3.54 W"), -3.54, msg="parseLon(\"-3.54\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("3:5:3 W"), -3.0841, msg="parseLon(\"3:5:3 W\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("10:5:5W"), -10.084722, msg="parseLon(\"10:5:5W\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("10:5W"), -10.08333, msg="parseLon(\"10:5W\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("3 °5' 3\" W"), -3.0841, msg="parseLon(\"3 °5' 3\" W\")", delta=1e-3)
+ self.assertAlmostEqual(parseLon("10°4'W"), -10.06666, msg="parseLon(\"10°4'W\")", delta=1e-3)
self.assertRaises(ValueError, parseLon, "370")
def dirIsWritable(directory):
- """check if directory is writable"""
+ '''check if directory is writable'''
if not directory:
return False
try:
- with tempfile.TemporaryFile(dir=directory):
+ with tempfile.TemporaryFile(dir=directory) as fh:
return True
- except Exception:
+ except:
return False
-
def delete_oldfiles(dir_to_search, age_in_days):
- """delete files older than age_in_days"""
+ '''delete files older than age_in_days'''
for dirpath, dirnames, filenames in os.walk(dir_to_search):
for file in filenames:
curpath = os.path.join(dirpath, file)
try:
- file_modified = datetime.datetime.fromtimestamp(
- os.lstat(curpath).st_mtime
- )
- if datetime.datetime.now() - file_modified > datetime.timedelta(
- days=age_in_days
- ):
+ file_modified = datetime.datetime.fromtimestamp(os.lstat(curpath).st_mtime)
+ if datetime.datetime.now() - file_modified > datetime.timedelta(days=age_in_days):
os.remove(curpath)
except FileNotFoundError:
pass
-if __name__ == "__main__":
- unittest.main()
+
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/utils/SnapPy/Snappy/__init__.py b/utils/SnapPy/Snappy/__init__.py
index 11f2921d..678d4f39 100644
--- a/utils/SnapPy/Snappy/__init__.py
+++ b/utils/SnapPy/Snappy/__init__.py
@@ -1,17 +1,17 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
diff --git a/utils/SnapPy/Snappy/read_dosecoefficients_icrp.py b/utils/SnapPy/Snappy/read_dosecoefficients_icrp.py
index 644216a8..7204105f 100755
--- a/utils/SnapPy/Snappy/read_dosecoefficients_icrp.py
+++ b/utils/SnapPy/Snappy/read_dosecoefficients_icrp.py
@@ -1,5 +1,6 @@
#! /usr/bin/env python3
import numpy as np
+import pathlib
import zipfile
import csv
import io
diff --git a/utils/SnapPy/calcTotalAsh.py b/utils/SnapPy/calcTotalAsh.py
index eb691493..139e780d 100644
--- a/utils/SnapPy/calcTotalAsh.py
+++ b/utils/SnapPy/calcTotalAsh.py
@@ -9,85 +9,77 @@
print(f"usage: {sys.argv[0]} ncfile_hour [ncfile_inst]", file=sys.stderr)
sys.exit(1)
-
def cell_area(lat, lon):
- """cell_area in m^2"""
- area = np.zeros((lat.shape[0], lon.shape[0]), "f8")
+ ''' cell_area in m^2'''
+ area = np.zeros((lat.shape[0], lon.shape[0]), 'f8')
latVals = np.radians(lat)
lonVals = np.radians(lon)
- dlon = abs(lonVals[1] - lonVals[0])
- dlat = abs(latVals[1] - latVals[0])
-
+ dlon = abs(lonVals[1]-lonVals[0])
+ dlat = abs(latVals[1]-latVals[0])
+
for i in range(len(latVals)):
cl = math.cos(latVals[i])
# formular: A = R^2 * cos(lat) * dLat * dLon
- area[i, :] = 6370 * 6370 * cl * dlat * dlon
-
- return area * 1000 * 1000
+ area[i,:] = 6370*6370 * cl * dlat * dlon
+ return area*1000*1000
rows = []
-with netCDF4.Dataset(sys.argv[1], "r") as nc:
- varnames = ("COLUMN_ASH_kmax", "COLUMN_ASH")
+with netCDF4.Dataset(sys.argv[1], 'r') as nc:
+ varnames = ('COLUMN_ASH_kmax', 'COLUMN_ASH')
for var in varnames:
if var in nc.variables:
break
else:
- var = None
- lon = nc["lon"][:]
- lat = nc["lat"][:]
+ var = None
+ lon = nc['lon'][:]
+ lat = nc['lat'][:]
area = cell_area(lat, lon)
- times = netCDF4.num2date(nc["time"][:], nc["time"].units)
+ times = netCDF4.num2date(nc['time'][:], nc['time'].units)
wDEP = 0
dDEP = 0
- for i, t in enumerate(times):
- data = nc[var][i, :]
- current = np.sum(data * area) / 1e12 # ug->Mg
- if "WDEP_ASH" in nc.variables:
- wDEP = np.sum(nc["WDEP_ASH"][i, :] * area) / 1e9 / 3600 # mg->Mg ; /h -> /s
- dDEP = (
- np.sum(nc["DDEP_ASH_m2Grid"][i, :] * area) / 1e9 / 3600
- ) # mg->Mg ; /h -> /s
- rows.append(
- {
- "current": current,
- "dDep": dDEP,
- "wDep": wDEP,
- }
- )
+ for i,t in enumerate(times):
+ data = nc[var][i,:]
+ current = np.sum(data*area)/1e12 # ug->Mg
+ if 'WDEP_ASH' in nc.variables:
+ wDEP = np.sum(nc['WDEP_ASH'][i,:]*area)/1e9 / 3600 # mg->Mg ; /h -> /s
+ dDEP = np.sum(nc['DDEP_ASH_m2Grid'][i,:]*area)/1e9 / 3600 # mg->Mg ; /h -> /s
+ rows.append({
+ 'current': current,
+ 'dDep': dDEP,
+ 'wDep': wDEP,
+ })
if len(sys.argv) > 2:
# instant if available
- with netCDF4.Dataset(sys.argv[2], "r") as nc:
- varnames = ("COLUMN_ASH_kmax", "COLUMN_ASH")
+ with netCDF4.Dataset(sys.argv[2], 'r') as nc:
+ varnames = ('COLUMN_ASH_kmax', 'COLUMN_ASH')
for var in varnames:
if var in nc.variables:
break
else:
- var = None
- for i, t in enumerate(times):
- data = nc[var][i, :]
- current = np.sum(data * area) / 1e12 # ug->Mg
- rows[i]["current"] = current
+ var = None
+ for i,t in enumerate(times):
+ data = nc[var][i,:]
+ current = np.sum(data*area)/1e12 # ug->Mg
+ rows[i]['current'] = current
-print("#i\tdate\ttotal_ash[Mg]\tair_rate\tddep_rate\twdep_rate\temis_rate[Mg/s]")
+print('#i\tdate\ttotal_ash[Mg]\tair_rate\tddep_rate\twdep_rate\temis_rate[Mg/s]')
atmosChange = 0
for i, row in enumerate(rows):
if i > 0:
- atmosChange = (row["current"] - rows[i - 1]["current"]) / 3600 # /h -> /s
+ atmosChange = (row['current'] - rows[i-1]['current']) / 3600 # /h -> /s
emis = atmosChange + row["dDep"] + row["wDep"]
- print(
- f'{i}\t{t}\t{row["current"]:.3f}\t{atmosChange:.3f}\t{row["dDep"]:.3f}\t{row["wDep"]:.3f}\t{emis:.3f}'
- )
-
+ print(f'{i}\t{t}\t{row["current"]:.3f}\t{atmosChange:.3f}\t{row["dDep"]:.3f}\t{row["wDep"]:.3f}\t{emis:.3f}')
+
if False:
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
-
# plot area
ax = plt.axes(projection=ccrs.PlateCarree())
- cf = plt.contourf(lon, lat, area / 1000 / 1000, 60, transform=ccrs.PlateCarree())
+ cf = plt.contourf(lon, lat, area/1000/1000, 60,
+ transform=ccrs.PlateCarree())
ax.coastlines()
- plt.colorbar(cf, orientation="horizontal")
+ plt.colorbar(cf, orientation='horizontal')
plt.show()
diff --git a/utils/SnapPy/setup.py b/utils/SnapPy/setup.py
index c7335ea8..b5702fa7 100644
--- a/utils/SnapPy/setup.py
+++ b/utils/SnapPy/setup.py
@@ -41,9 +41,11 @@
"snapRunnerNpps",
"snapRunnerNpp",
"eemepModelRunner",
- "snapRemoteRunner.py",
+ "snapRemoteRunner.py"
],
entry_points={
- "console_scripts": ["snapAddBombIsotopes = Snappy.AddBombIsotopes:main"]
- },
+ 'console_scripts': [
+ 'snapAddBombIsotopes = Snappy.AddBombIsotopes:main'
+ ]
+ }
)
diff --git a/utils/SnapPy/snapEnsPlot.py b/utils/SnapPy/snapEnsPlot.py
index 79421dcf..918bc4dc 100644
--- a/utils/SnapPy/snapEnsPlot.py
+++ b/utils/SnapPy/snapEnsPlot.py
@@ -3,110 +3,67 @@
import argparse
import warnings
import cartopy
+import datetime
import matplotlib
-
+from pyproj import Proj
+matplotlib.use('Agg')
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib import pyplot as plt
import netCDF4
import os
import sys
-import logging
import numpy as np
-matplotlib.use("Agg")
-
# suppress some warnings
-warnings.filterwarnings(
- "ignore",
- category=UserWarning,
- message="Warning: 'partition' will ignore the 'mask' of the MaskedArray.",
-)
+warnings.filterwarnings("ignore", category=UserWarning,
+ message="Warning: 'partition' will ignore the 'mask' of the MaskedArray.")
# shapefile.py uses root logger :-( and warns a lot about GSHHS
-
+import logging
logging.root.setLevel(logging.ERROR)
+def plotMap(data, x, y, ax, title="", title_loc="center", clevs=[10,100,300,1000,3000,10000,30000,100000, 300000, 10000000], colors=None, extend='max'):
+ ax.add_feature(cartopy.feature.GSHHSFeature(scale='low', facecolor='none', edgecolor='whitesmoke', linewidth=.2), zorder=100)
+ ax.add_feature(cartopy.feature.BORDERS, edgecolor="lightgray", linewidth=.5, zorder=100)
+ #ax.gridlines(draw_labels=True)
+ ax.gridlines(edgecolor="lightgray", linewidth=.3, zorder=100)
-def plotMap(
- data,
- x,
- y,
- ax,
- title="",
- title_loc="center",
- clevs=[10, 100, 300, 1000, 3000, 10000, 30000, 100000, 300000, 10000000],
- colors=None,
- extend="max",
-):
- ax.add_feature(
- cartopy.feature.GSHHSFeature(
- scale="low", facecolor="none", edgecolor="whitesmoke", linewidth=0.2
- ),
- zorder=100,
- )
- ax.add_feature(
- cartopy.feature.BORDERS, edgecolor="lightgray", linewidth=0.5, zorder=100
- )
- # ax.gridlines(draw_labels=True)
- ax.gridlines(edgecolor="lightgray", linewidth=0.3, zorder=100)
-
- data.shape[0]
- data.shape[1]
+ ny = data.shape[0]
+ nx = data.shape[1]
# draw filled contours.
if colors is None:
- colors = [plt.cm.hsv(x) for x in np.linspace(0.5, 0, len(clevs))]
- cs = ax.contourf(x, y, data, clevs, colors=colors, extend=extend, zorder=50)
+ colors = [ plt.cm.hsv(x) for x in np.linspace(0.5, 0, len(clevs)) ]
+ cs = ax.contourf(x,y,data,clevs,colors=colors, extend=extend, zorder=50)
# add title
ax.set_title(title, loc=title_loc)
- ax.add_feature(
- cartopy.feature.OCEAN, facecolor="#aecfe0", edgecolor="none", zorder=10
- ) # #aecfe0 = osm-sea
- ax.add_feature(
- cartopy.feature.LAND, facecolor="#f2efe9", edgecolor="none", zorder=10
- ) # f2efe9 = osm-land
- ax.add_feature(
- cartopy.feature.LAKES,
- facecolor="#aecfe0",
- edgecolor="whitesmoke",
- linewidth=0.2,
- zorder=20,
- )
+ ax.add_feature(cartopy.feature.OCEAN,facecolor="#aecfe0", edgecolor='none', zorder=10) # #aecfe0 = osm-sea
+ ax.add_feature(cartopy.feature.LAND, facecolor="#f2efe9", edgecolor='none', zorder=10) # f2efe9 = osm-land
+ ax.add_feature(cartopy.feature.LAKES,facecolor="#aecfe0", edgecolor='whitesmoke', linewidth=.2, zorder=20)
return cs
-def plotMapGrid(
- data,
- x,
- y,
- ax,
- title="",
- title_loc="center",
- clevs=[10, 100, 300, 1000, 3000, 10000, 30000, 100000, 300000, 10000000],
- colors=None,
-):
- ax.add_feature(
- cartopy.feature.GSHHSFeature(scale="auto", facecolor="none", edgecolor="black")
- )
+def plotMapGrid(data, x, y, ax, title="", title_loc="center", clevs=[10,100,300,1000,3000,10000,30000,100000, 300000, 10000000], colors=None):
+ ax.add_feature(cartopy.feature.GSHHSFeature(scale='auto', facecolor='none', edgecolor='black'))
ax.gridlines()
ax.add_feature(cartopy.feature.BORDERS)
- data.shape[0]
- data.shape[1]
+ ny = data.shape[0]
+ nx = data.shape[1]
# draw filled contours.
if colors is None:
- colors = [plt.cm.hsv(x) for x in np.linspace(0.5, 0, len(clevs))]
- cmap = plt.get_cmap("tab10")
+ colors = [ plt.cm.hsv(x) for x in np.linspace(0.5, 0, len(clevs)) ]
+ cmap = plt.get_cmap('tab10')
norm = matplotlib.colors.BoundaryNorm(clevs, ncolors=cmap.N, clip=True)
- cs = ax.pcolormesh(x, y, data, norm=norm, cmap=cmap)
+ cs = ax.pcolormesh(x,y,data,norm=norm, cmap=cmap)
# add title
ax.set_title(title, loc=title_loc)
return cs
def rgbToColor(rgb):
- color = [int(x) / 255 for x in rgb.split(":")]
+ color = [int(x)/255 for x in rgb.split(':')]
if len(color) != 4:
- color.append(1.0)
+ color.append(1.)
return color
@@ -119,7 +76,7 @@ def snapens(ncfiles, hour, outfile):
toa = []
deps = []
for ncf in ncfiles:
- with netCDF4.Dataset(ncf, "r") as nc:
+ with netCDF4.Dataset(ncf, 'r') as nc:
if not title:
title = nc.title
if not startDT:
@@ -130,242 +87,198 @@ def snapens(ncfiles, hour, outfile):
startDT = times[0] - step
stepH = step.seconds // 3600 + step.days * 24
if (hour % stepH) == 0:
- pos = hour // stepH - 1
+ pos = hour//stepH - 1
endDT = times[pos]
else:
- print(
- f"cannot devide {hour} forecast_hour by {stepH}h timesteps",
- sys.stderr,
- )
+            print(f"cannot divide {hour} forecast_hour by {stepH}h timesteps", file=sys.stderr)
sys.exit(1)
- if "time_of_arrival" not in nc.variables:
- print(
- f"time_of_arrival not in {ncf}, please run 'snapAddToa {ncf}",
- file=sys.stderr,
- )
+ if not "time_of_arrival" in nc.variables:
+            print(f"time_of_arrival not in {ncf}, please run 'snapAddToa {ncf}'", file=sys.stderr)
sys.exit(2)
- toa.append(nc["time_of_arrival"][0, :])
+ toa.append(nc["time_of_arrival"][0,:])
fillvalue = nc["time_of_arrival"]._FillValue
- deps.append(nc["Cs137_acc_total_deposition"][pos, :])
+ deps.append(nc["Cs137_acc_total_deposition"][pos,:])
lons = nc["longitude"][:]
lats = nc["latitude"][:]
x = nc["x"][:]
y = nc["y"][:]
toa = np.stack(toa)
toa = toa.filled(fill_value=fillvalue)
- toa[toa == fillvalue] = (steps + 1) * stepH
- toaPerc = np.percentile(toa, [10, 50, 90], axis=0)
+ toa[toa == fillvalue] = (steps+1)*stepH
+ toaPerc = np.percentile(toa, [10,50,90], axis=0)
deps = np.stack(deps)
- depsPerc = np.percentile(deps, [10, 50, 90], axis=0)
+ depsPerc = np.percentile(deps, [10,50,90], axis=0)
depTH = []
for th in [1e3, 1e4, 1e5]:
- depTH.append(np.sum(deps > th, axis=0) * 100 / deps.shape[0])
+ depTH.append(np.sum(deps > th, axis=0)*100/deps.shape[0])
# and the plot
formatter = matplotlib.ticker.ScalarFormatter()
- formatter.set_powerlimits((-3, 10))
+ formatter.set_powerlimits((-3,10))
fig = plt.figure(figsize=(12, 10.7))
- fig.suptitle(
- f"{title}+{hour}hours ({endDT:%Y-%m-%d %H}:00Z). Uncertainty based upon {deps.shape[0]} members.",
- y=0.92,
- )
+ fig.suptitle(f'{title}+{hour}hours ({endDT:%Y-%m-%d %H}:00Z). Uncertainty based upon {deps.shape[0]} members.',
+ y=0.92)
proj = cartopy.crs.PlateCarree()
- with netCDF4.Dataset(ncfiles[0], "r") as nc:
+ with netCDF4.Dataset(ncfiles[0], 'r') as nc:
if "grid_mapping" in nc["Cs137_acc_total_deposition"].ncattrs():
grid_attrs = {}
for attr in nc[nc["Cs137_acc_total_deposition"].grid_mapping].ncattrs():
- grid_attrs[attr] = nc[
- nc["Cs137_acc_total_deposition"].grid_mapping
- ].getncattr(attr)
+ grid_attrs[attr] = nc[nc["Cs137_acc_total_deposition"].grid_mapping].getncattr(attr)
# print(grid_attrs)
- if grid_attrs["grid_mapping_name"] == "lambert_conformal_conic":
- proj = cartopy.crs.LambertConformal(
- central_longitude=grid_attrs["longitude_of_central_meridian"],
- central_latitude=grid_attrs["latitude_of_projection_origin"],
- standard_parallels=[grid_attrs["standard_parallel"]],
- globe=cartopy.crs.Globe(
- semiminor_axis=6371000, semimajor_axis=6371000
- ),
- )
+ if grid_attrs['grid_mapping_name'] == 'lambert_conformal_conic':
+ proj = cartopy.crs.LambertConformal(central_longitude=grid_attrs['longitude_of_central_meridian'],
+ central_latitude=grid_attrs['latitude_of_projection_origin'],
+ standard_parallels=[grid_attrs['standard_parallel']],
+ globe=cartopy.crs.Globe(semiminor_axis=6371000,
+ semimajor_axis=6371000))
data_x = x
data_y = y
else:
- print(
- f"unknown grid_mapping_name {grid_attrs}, trying latlon/PlateCaree",
- file=sys.stderr,
- )
+            print(f"unknown grid_mapping_name {grid_attrs}, trying latlon/PlateCarree", file=sys.stderr)
data_x = lons
data_y = lats
- colors = ("g", "y", "tab:orange", "r", "tab:red")
- ax = fig.add_subplot(3, 3, 1, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- depsPerc[0, :] / 1000,
- title="Cs-137 deps: 10 perc.",
- title_loc="left",
- ax=ax,
- colors=colors,
- clevs=[0.1, 1, 10, 100, 1000],
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 2, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- depsPerc[1, :] / 1000,
- title="median",
- ax=ax,
- colors=colors,
- clevs=[0.1, 1, 10, 100, 1000],
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 3, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- depsPerc[2, :] / 1000,
- title="90 percentile",
- ax=ax,
- colors=colors,
- clevs=[0.1, 1, 10, 100, 1000],
- x=data_x,
- y=data_y,
- )
- axins = inset_axes(
- ax,
- width="3%",
- height="95%",
- loc="lower right",
- bbox_to_anchor=(0.0, 0.0, 1.05, 1),
- bbox_transform=ax.transAxes,
- borderpad=0,
- )
- cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation="vertical")
- cbar.set_label("kBq/m²")
+ colors=('g', 'y', 'tab:orange', 'r', 'tab:red')
+ ax = fig.add_subplot(3,3, 1, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(depsPerc[0,:]/1000,
+ title="Cs-137 deps: 10 perc.",
+ title_loc="left",
+ ax=ax,
+ colors=colors,
+ clevs=[0.1,1, 10, 100, 1000],
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 2, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(depsPerc[1,:]/1000,
+ title="median",
+ ax=ax,
+ colors=colors,
+ clevs=[0.1,1, 10, 100, 1000],
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 3, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(depsPerc[2,:]/1000,
+ title="90 percentile",
+ ax=ax,
+ colors=colors,
+ clevs=[0.1,1, 10, 100, 1000],
+ x=data_x, y=data_y)
+ axins = inset_axes(ax,
+ width="3%",
+ height="95%",
+ loc='lower right',
+ bbox_to_anchor=(0., 0., 1.05, 1),
+ bbox_transform=ax.transAxes,
+ borderpad=0,
+ )
+ cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation='vertical')
+ cbar.set_label('kBq/m²')
+
+ ax = fig.add_subplot(3,3, 4, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ colors = [ plt.cm.Paired(x) for x in [1,10,7,5,9] ]
+ cs = plotMap(depTH[0],
+ title="Probability: dep. > 1kBq/m2",
+ title_loc="left",
+ ax=ax,
+ colors=colors,
+ clevs=[10,30,50,70,90],
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 5, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(depTH[1],
+ title="depo. > 10kBq/m2",
+ ax=ax,
+ colors=colors,
+ clevs=[10,30,50,70,90],
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 6, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(depTH[2],
+ title="depo. > 100kBq/m2",
+ ax=ax,
+ colors=colors,
+ clevs=[10,30,50,70,90],
+ x=data_x, y=data_y)
+ axins = inset_axes(ax,
+ width="3%",
+ height="95%",
+ loc='lower right',
+ bbox_to_anchor=(0., 0., 1.05, 1),
+ bbox_transform=ax.transAxes,
+ borderpad=0,
+ )
+ cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation='vertical')
+ cbar.set_label('%')
- ax = fig.add_subplot(3, 3, 4, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- colors = [plt.cm.Paired(x) for x in [1, 10, 7, 5, 9]]
- cs = plotMap(
- depTH[0],
- title="Probability: dep. > 1kBq/m2",
- title_loc="left",
- ax=ax,
- colors=colors,
- clevs=[10, 30, 50, 70, 90],
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 5, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- depTH[1],
- title="depo. > 10kBq/m2",
- ax=ax,
- colors=colors,
- clevs=[10, 30, 50, 70, 90],
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 6, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- depTH[2],
- title="depo. > 100kBq/m2",
- ax=ax,
- colors=colors,
- clevs=[10, 30, 50, 70, 90],
- x=data_x,
- y=data_y,
- )
- axins = inset_axes(
- ax,
- width="3%",
- height="95%",
- loc="lower right",
- bbox_to_anchor=(0.0, 0.0, 1.05, 1),
- bbox_transform=ax.transAxes,
- borderpad=0,
- )
- cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation="vertical")
- cbar.set_label("%")
# Time of arrival
- ax = fig.add_subplot(3, 3, 7, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- clevs = range(0, (steps + 1) * stepH, 2 * stepH)
- colors = [plt.cm.jet(x) for x in np.linspace(0.95, 0.1, len(clevs))]
+ ax = fig.add_subplot(3,3, 7, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ clevs=range(0,(steps+1)*stepH,2*stepH)
+ colors = [ plt.cm.jet(x) for x in np.linspace(0.95,0.1,len(clevs)) ]
# dsa colors up to 48 hours
- colors[0] = rgbToColor("128:0:255")
- colors[1] = rgbToColor("128:128:255")
- colors[2] = rgbToColor("128:128:192")
- colors[3] = rgbToColor("192:192:192")
- cs = plotMap(
- toaPerc[2, :],
- title="Time of arrival: 90 perc.",
- title_loc="left",
- ax=ax,
- colors=colors,
- clevs=clevs,
- extend=None,
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 8, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- toaPerc[1, :],
- title="median",
- ax=ax,
- colors=colors,
- clevs=clevs,
- extend=None,
- x=data_x,
- y=data_y,
- )
- ax = fig.add_subplot(3, 3, 9, projection=proj)
- ax.set_extent([x[50], x[-50], y[25], y[-25]], crs=proj)
- cs = plotMap(
- toaPerc[0, :],
- title="10 percentile",
- ax=ax,
- colors=colors,
- clevs=clevs,
- extend=None,
- x=data_x,
- y=data_y,
- )
- axins = inset_axes(
- ax,
- width="3%",
- height="95%",
- loc="lower right",
- bbox_to_anchor=(0.0, 0.0, 1.05, 1),
- bbox_transform=ax.transAxes,
- borderpad=0,
- )
- cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation="vertical")
- cbar.set_label("hours")
+ colors[0] = rgbToColor('128:0:255')
+ colors[1] = rgbToColor('128:128:255')
+ colors[2] = rgbToColor('128:128:192')
+ colors[3] = rgbToColor('192:192:192')
+ cs = plotMap(toaPerc[2,:],
+ title="Time of arrival: 90 perc.",
+ title_loc="left",
+ ax=ax,
+ colors=colors,
+ clevs=clevs,
+ extend=None,
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 8, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(toaPerc[1,:],
+ title="median",
+ ax=ax,
+ colors=colors,
+ clevs=clevs,
+ extend=None,
+ x=data_x, y=data_y)
+ ax = fig.add_subplot(3,3, 9, projection=proj)
+ ax.set_extent([x[50],x[-50],y[25],y[-25]], crs=proj)
+ cs = plotMap(toaPerc[0,:],
+ title="10 percentile",
+ ax=ax,
+ colors=colors,
+ clevs=clevs,
+ extend=None,
+ x=data_x, y=data_y)
+ axins = inset_axes(ax,
+ width="3%",
+ height="95%",
+ loc='lower right',
+ bbox_to_anchor=(0., 0., 1.05, 1),
+ bbox_transform=ax.transAxes,
+ borderpad=0,
+ )
+ cbar = fig.colorbar(cs, cax=axins, format=formatter, orientation='vertical')
+ cbar.set_label('hours')
+
+
fig.subplots_adjust(hspace=0.12, wspace=0.01)
- fig.savefig(outfile, bbox_inches="tight")
+ fig.savefig(outfile, bbox_inches='tight')
+
if __name__ == "__main__":
os.umask(0o002)
parser = argparse.ArgumentParser(
description="Read snap files from identical ensemble runs and create a plot with statistical analysis for a certain forecast-hour",
- usage=f"{sys.argv[0]} --hour FORECAST_HOUR snap_01.nc [snap_02.nc ... snap_xx.nc]",
+ usage=f"{sys.argv[0]} --hour FORECAST_HOUR snap_01.nc [snap_02.nc ... snap_xx.nc]"
)
parser.add_argument("--out", help="output png file", required=True)
- parser.add_argument(
- "--hour", help="hour of output to analyse", type=int, required=True
- )
- # parser.add_argument("--store", help="storeA or storeB, meteo and runtime-datastore, default used from MAPP-system")
- parser.add_argument("SNAPNC", help="snap*.nc filenames", nargs="+")
+ parser.add_argument("--hour", help="hour of output to analyse", type=int, required=True)
+ #parser.add_argument("--store", help="storeA or storeB, meteo and runtime-datastore, default used from MAPP-system")
+ parser.add_argument('SNAPNC', help="snap*.nc filenames", nargs='+')
args = parser.parse_args()
- snapens(args.SNAPNC, args.hour, args.out)
+ snapens(args.SNAPNC, args.hour, args.out)
\ No newline at end of file
diff --git a/utils/SnapPy/snapNc2grib.py b/utils/SnapPy/snapNc2grib.py
index ef85d54c..1b524b6a 100755
--- a/utils/SnapPy/snapNc2grib.py
+++ b/utils/SnapPy/snapNc2grib.py
@@ -6,37 +6,31 @@
import netCDF4
from Snappy.Resources import Resources, snapNc_convert_to_grib
-
def getIsotopesFromFile(filename):
isotop_names = []
- with netCDF4.Dataset(ncfile, "r") as nc:
+ with netCDF4.Dataset(ncfile, 'r') as nc:
for var in nc.variables:
- if var.endswith("acc_concentration"):
+ if var.endswith('acc_concentration'):
continue
- if var.endswith("_concentration"):
+ if var.endswith('_concentration'):
isotop_names.append(var[:-14])
isotopes = []
used_isotop_names = []
for isoId, iso in Resources().getIsotopes().items():
- if iso["isotope"] in isotop_names:
+ if iso['isotope'] in isotop_names:
isotopes.append(isoId)
- used_isotop_names.append(iso["isotope"])
+ used_isotop_names.append(iso['isotope'])
print(f"converting isotopes: {', '.join(used_isotop_names)}", file=sys.stderr)
dropped_names = set(isotop_names).difference(used_isotop_names)
print(f"dropping unknown isotopes: {', '.join(dropped_names)}", file=sys.stderr)
return isotopes
-
if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description="convert a snap.nc output-file to grib, should be run after snapAddToa"
- )
+ parser = argparse.ArgumentParser(description="convert a snap.nc output-file to grib, should be run after snapAddToa")
parser.add_argument("--nc", help="snap.nc filename", required=True)
parser.add_argument("--ident", help="output-file identifier", required=True)
- parser.add_argument(
- "--bitmapCompress", help="enable grib bitmap-compression", action="store_true"
- )
-
+ parser.add_argument("--bitmapCompress", help="enable grib bitmap-compression", action='store_true')
+
args = parser.parse_args()
ncfile = args.nc
@@ -44,8 +38,6 @@ def getIsotopesFromFile(filename):
ident = args.ident
bitmapCompress = False
if args.bitmapCompress:
- bitmapCompress = True
+ bitmapCompress= True
dirname = os.path.dirname(ncfile)
- snapNc_convert_to_grib(
- ncfile, dirname, ident, isotopes, bitmapCompress=bitmapCompress
- )
+ snapNc_convert_to_grib(ncfile, dirname, ident, isotopes, bitmapCompress=bitmapCompress)
diff --git a/utils/SnapPy/snapRemoteRunner.py b/utils/SnapPy/snapRemoteRunner.py
index 53675ff4..47d8810a 100755
--- a/utils/SnapPy/snapRemoteRunner.py
+++ b/utils/SnapPy/snapRemoteRunner.py
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
+from METNO.SSHConnection import SSHConnection
"""
Created on Mar 2, 2018
@@ -34,7 +35,6 @@
@author: heikok
"""
-from METNO.SSHConnection import SSHConnection
from METNO.HPC import typed_property, HPC
from Snappy.Utils import delete_oldfiles, dirIsWritable
import atexit
@@ -108,7 +108,7 @@ def is_complete(self, reldir):
return False
def handle(self, hpc):
- """Handle the job on the hpc. HPC directories must be writable locally.
+ """ Handle the job on the hpc. HPC directories must be writable locally.
Raise SnapJob.UnknownModelException on input-zip error
Raise Exception on any error
"""
@@ -327,9 +327,7 @@ def _write_status(self, task, tag):
)
elif tag == "running":
fh.write(
- "101:{ts}::queued {model} for processing\n".format(
- ts=timestamp, model=task.model
- )
+ "101:{ts}::queued {model} for processing\n".format(ts=timestamp, model=task.model)
)
elif tag == "internal":
fh.write(
@@ -340,7 +338,8 @@ def _write_status(self, task, tag):
else:
fh.write(
"{x}:{ts}::internal error in status tag\n".format(
- x=500, ts=timestamp, )
+ x=500, ts=timestamp, rundir=task.rundir
+ )
)
print(f"wrong status tag: {tag}", file=sys.stderr)
self.ssh.put_files([work_file], self.remote_dir, 30)
@@ -397,9 +396,9 @@ def _check_and_unpack_new_files(self):
try:
task.handle(self.hpc)
self.write_status(task, tag="running")
- except UnknownModelException:
+ except UnknownModelException as umex:
self.write_status(task, tag="inputerror")
- except Exception:
+ except Exception as ex:
self.write_status(task, tag="internal")
delete_in_upload.append(f)
else:
diff --git a/utils/SnapPy/test/createArgos2Snap.py b/utils/SnapPy/test/createArgos2Snap.py
index 23b43201..dd606b16 100644
--- a/utils/SnapPy/test/createArgos2Snap.py
+++ b/utils/SnapPy/test/createArgos2Snap.py
@@ -2,28 +2,28 @@
#
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-"""Create *ARGOS2SNAP.zip and *ARGOS2TRAJ.zip files with current dates"""
+'''Create *ARGOS2SNAP.zip and *ARGOS2TRAJ.zip files with current dates'''
import datetime
import zipfile
-rimsterm = """
+rimsterm = '''
ARGOS application: 9.3.8.0 - User: msolberg
@@ -46,9 +46,9 @@
-"""
+'''
-rimsterm_back = """
+rimsterm_back = '''
ARGOS application: 9.4.9.0 - User: JanErik
@@ -76,16 +76,16 @@
-"""
-request_back = """
+'''
+request_back = '''
-24
3
NetCDF
-"""
+'''
-traj_input = """BROKDORF_(KBR) Source name
+traj_input = '''BROKDORF_(KBR) Source name
53.851 Latitude (dec.deg.)
9.346 Longitude (dec.deg.)
{date}1029 Start (UTC)
@@ -95,9 +95,9 @@
50.0 First parcel (metres above surface)
100.0 Second parcel (metres above surface)
500.0 Third parcel (metres above surface)
-"""
+'''
-traj_back = """backward2 Source name
+traj_back = '''backward2 Source name
69.387 Latitude (dec.deg.)
23.760 Longitude (dec.deg.)
{date}0703 Start (UTC)
@@ -107,27 +107,20 @@
30.0 First parcel (metres above surface)
500.0 Second parcel (metres above surface)
1500.0 Third parcel (metres above surface)
-"""
+'''
today = datetime.datetime.now()
tomorrow = today + datetime.timedelta(days=1)
-with zipfile.ZipFile("Hartlepool-777_ARGOS2SNAP.zip", "w") as zh:
- zh.writestr(
- "Hartlepool-777_Rimsterm.xml", rimsterm.format(date=today.strftime("%Y-%m-%d"))
- )
-
-with zipfile.ZipFile("TestBackModeling_ARGOS2SNAP.zip", "w") as zh:
- zh.writestr(
- "TestBackModeling_Rimsterm.xml",
- rimsterm_back.format(date=today.strftime("%Y-%m-%d")),
- )
- zh.writestr("TestBackModeling_SNAP_request.xml", request_back)
+with zipfile.ZipFile('Hartlepool-777_ARGOS2SNAP.zip', 'w') as zh:
+ zh.writestr('Hartlepool-777_Rimsterm.xml', rimsterm.format(date=today.strftime('%Y-%m-%d')))
-with zipfile.ZipFile("Brokdorf_test_2_ARGOS2TRAJ.zip", "w") as zh:
- zh.writestr(
- "Brokdorf_test_2_TRAJ_input", traj_input.format(date=today.strftime("%Y%m%d"))
- )
+with zipfile.ZipFile('TestBackModeling_ARGOS2SNAP.zip', 'w') as zh:
+ zh.writestr('TestBackModeling_Rimsterm.xml', rimsterm_back.format(date=today.strftime('%Y-%m-%d')))
+ zh.writestr('TestBackModeling_SNAP_request.xml', request_back)
+
+with zipfile.ZipFile('Brokdorf_test_2_ARGOS2TRAJ.zip', 'w') as zh:
+ zh.writestr('Brokdorf_test_2_TRAJ_input', traj_input.format(date=today.strftime('%Y%m%d')))
-with zipfile.ZipFile("backward2_ARGOS2TRAJ.zip", "w") as zh:
- zh.writestr("backward2_TRAJ_input", traj_back.format(date=today.strftime("%Y%m%d")))
+with zipfile.ZipFile('backward2_ARGOS2TRAJ.zip', 'w') as zh:
+ zh.writestr('backward2_TRAJ_input', traj_back.format(date=today.strftime('%Y%m%d')))
diff --git a/utils/addHeader.py b/utils/addHeader.py
index a3d74e72..b4c604c3 100644
--- a/utils/addHeader.py
+++ b/utils/addHeader.py
@@ -24,7 +24,7 @@
def insert_header(oh, ct):
- header = """SNAP: Servere Nuclear Accident Programme
+ header = '''SNAP: Servere Nuclear Accident Programme
Copyright (C) 1992-2020 Norwegian Meteorological Institute
This file is part of SNAP. SNAP is free software: you can
@@ -39,29 +39,32 @@ def insert_header(oh, ct):
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with this program. If not, see ."""
+along with this program. If not, see .'''
for line in header.splitlines():
oh.write(ct + " " + line + "\n")
oh.write(ct + "\n")
+
parser = argparse.ArgumentParser()
parser.add_argument("file", help="snap.nc file to be changed")
parser.add_argument("--type", help="file-type (python/perl/fortran)", required=True)
args = parser.parse_args()
-types = {"python": "#", "perl": "#", "fortran": "!"}
+types = {'python': '#',
+ 'perl': '#',
+ 'fortran': '!'}
if not args.type in types:
print("unknown type: {}".format(args.type), file=sys.stderr)
sys.exit(1)
-with open(args.file, "rt") as ih:
- with open(args.file + ".header", "wt") as oh:
+with open(args.file, 'rt') as ih:
+ with open(args.file + ".header", 'wt') as oh:
line = ih.readline()
- if line.startswith("#!"):
+ if line.startswith('#!'):
oh.write(line)
oh.write(types[args.type] + "\n")
insert_header(oh, types[args.type])
diff --git a/utils/flight_distance.py b/utils/flight_distance.py
index 938582d0..c80968a5 100644
--- a/utils/flight_distance.py
+++ b/utils/flight_distance.py
@@ -2,25 +2,24 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2021 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
import argparse
import matplotlib
-
-matplotlib.use("Agg")
+matplotlib.use('Agg')
import matplotlib.pyplot as plt
import math
import netCDF4
@@ -28,14 +27,12 @@
import pathlib
import sys
-
def distance(clon, clat, lons, lats):
- """calculate the distance of cells given by lons,lats from a clon,clat
- using the haversine formula
- """
+ '''calculate the distance of cells given by lons,lats from a clon,clat
+ using the haversine formula
+ '''
from math import sin, cos, sqrt, atan2, radians
-
- R = 6371.0
+ R = 6371.
rclat = radians(clat)
rclon = radians(clon)
@@ -45,7 +42,7 @@ def distance(clon, clat, lons, lats):
dlon = rlons - rclon
dlat = rlats - rclat
- a = np.sin(dlat / 2) ** 2 + cos(rclat) * np.cos(rlats) * np.sin(dlon / 2) ** 2
+ a = np.sin(dlat / 2)**2 + cos(rclat) * np.cos(rlats) * np.sin(dlon / 2)**2
c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1 - a))
return R * c
@@ -56,18 +53,9 @@ def distance(clon, clat, lons, lats):
description="Plot the shortest downwind distance for concentration/deposition from SNAP output files"
)
parser.add_argument("--input", type=pathlib.Path, required=True)
- parser.add_argument(
- "--output", type=pathlib.Path, help="output-file", required=True
- )
- parser.add_argument(
- "--parameter",
- type=str,
- help="comma-separated list of parameters (accumulated)",
- default="Cs137_acc_concentration",
- )
- parser.add_argument(
- "--timestep", type=int, help="timestep to analyze", default="-1"
- )
+ parser.add_argument("--output", type=pathlib.Path, help="output-file", required=True)
+ parser.add_argument("--parameter", type=str, help="comma-separated list of parameters (accumulated)", default="Cs137_acc_concentration")
+ parser.add_argument("--timestep", type=int, help="timestep to analyze", default="-1")
parser.add_argument("--lat", type=float, help="source-latitude", required=True)
parser.add_argument("--lon", type=float, help="source-longitude", required=True)
args = parser.parse_args()
@@ -77,45 +65,43 @@ def distance(clon, clat, lons, lats):
tstep = args.timestep
ifile = args.input
ofile = args.output
- params = args.parameter.split(",")
+ params = args.parameter.split(',')
- with netCDF4.Dataset(ifile, "r") as nc:
- dist = distance(lon, lat, nc["longitude"][:], nc["latitude"][:])
+ with netCDF4.Dataset(ifile, 'r') as nc:
+ dist = distance(lon, lat, nc['longitude'][:], nc['latitude'][:])
- # print(np.min(dist), np.max(dist))
+ #print(np.min(dist), np.max(dist))
data = 0
xvals = []
yvals = []
for p in params:
- data += nc[p][tstep, :]
+ data += nc[p][tstep,:]
maxlog = math.ceil(math.log10(np.max(data)))
print(np.max(data), maxlog)
- thresholds = np.logspace(maxlog - 9, 9, num=400, dtype="float")
- times = netCDF4.num2date(nc["time"][:], nc["time"].units)
- td = (times[tstep] - times[0]) + (
- times[1] - times[0]
- ) # snap omits writing first timestep, so add one
- hours = td.days * 24 + td.seconds // 3600
- data /= hours # average
+ thresholds = np.logspace(maxlog-9,9,num=400, dtype='float')
+ times = netCDF4.num2date(nc['time'][:], nc['time'].units)
+ td = (times[tstep] - times[0]) + (times[1] - times[0]) # snap omits writing first timestep, so add one
+ hours = td.days*24 + td.seconds//3600
+ data /= hours # average
for i in range(len(thresholds)):
mask = data > thresholds[i]
- if np.sum(mask) > 0:
+ if (np.sum(mask) > 0):
xvals.append(np.max(dist[mask]))
yvals.append(thresholds[i])
fig = plt.figure()
ax = plt.axes()
- ax.plot(xvals, yvals, color="b", label=", ".join(params))
+ ax.plot(xvals, yvals, color='b', label=", ".join(params))
ax.legend()
- ax.plot([1, 300], [1000, 1000], color="y")
- ax.plot([1, 300], [10000, 10000], color="r")
- ax.set_yscale("log")
- ax.set_xscale("log")
- ax.set_xlabel("Downwind distance [km]")
- ax.set_ylabel(f"Bq/m³ ({hours}h avg.)")
- ax.grid("b", which="both")
+ ax.plot([1,300], [1000, 1000], color='y' )
+ ax.plot([1,300], [10000, 10000], color='r' )
+ ax.set_yscale('log')
+ ax.set_xscale('log')
+ ax.set_xlabel('Downwind distance [km]')
+ ax.set_ylabel(f'Bq/m³ ({hours}h avg.)')
+ ax.grid('b', which='both')
formatter = matplotlib.ticker.ScalarFormatter()
- formatter.set_powerlimits((-3, 10))
+ formatter.set_powerlimits((-3,10))
ax.xaxis.set_major_formatter(formatter)
# ax.set_xlim([1,200])
fig.savefig(ofile)
diff --git a/utils/snapMapPlot.py b/utils/snapMapPlot.py
index d218e813..75fd3fe8 100644
--- a/utils/snapMapPlot.py
+++ b/utils/snapMapPlot.py
@@ -1,108 +1,84 @@
# SNAP: Servere Nuclear Accident Programme
# Copyright (C) 1992-2017 Norwegian Meteorological Institute
-#
-# This file is part of SNAP. SNAP is free software: you can
-# redistribute it and/or modify it under the terms of the
-# GNU General Public License as published by the
+#
+# This file is part of SNAP. SNAP is free software: you can
+# redistribute it and/or modify it under the terms of the
+# GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-#
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-#
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
from mpl_toolkits.basemap import Basemap
-
# requires netcdf4-python (netcdf4-python.googlecode.com)
from netCDF4 import Dataset as NetCDFFile
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
-
def fmt(x, pos):
- a, b = "{:.1e}".format(x).split("e")
+ a, b = '{:.1e}'.format(x).split('e')
b = int(b)
- return "{:2.0e}".format(x)
+ return '{:2.0e}'.format(x)
-def plotMap(
- data,
- lons,
- lats,
- ax,
- title="",
- bb={"south": 65, "north": 75, "west": 10, "east": 35},
- clevs=[10, 100, 300, 1000, 3000, 10000, 30000, 100000, 300000, 10000000],
-):
- m = Basemap(
- projection="cyl",
- llcrnrlat=bb["south"],
- urcrnrlat=bb["north"],
- llcrnrlon=bb["west"],
- urcrnrlon=bb["east"],
- resolution="i",
- ax=ax,
- )
+def plotMap(data, lons, lats, ax, title="", bb={"south":65, "north":75, "west":10, "east":35}, clevs=[10,100,300,1000,3000,10000,30000,100000, 300000, 10000000]):
+ m = Basemap(projection='cyl',llcrnrlat=bb["south"],urcrnrlat=bb["north"], \
+ llcrnrlon=bb["west"],urcrnrlon=bb["east"],resolution='i', ax=ax)
# find x,y of map projection grid.
- # lons, lats = np.meshgrid(lons, lats)
+ #lons, lats = np.meshgrid(lons, lats)
x, y = m(lons, lats)
# draw coastlines, state and country boundaries, edge of map.
m.drawcoastlines()
- m.drawlsmask(grid=1.25, resolution="i")
+ m.drawlsmask(grid=1.25, resolution='i')
m.drawcountries()
# draw parallels.
- parallels = np.arange(0.0, 90, 10.0)
- m.drawparallels(parallels, labels=[1, 0, 0, 0], fontsize=10)
+ parallels = np.arange(0.,90,10.)
+ m.drawparallels(parallels,labels=[1,0,0,0],fontsize=10)
# draw meridians
- meridians = np.arange(-180.0, 180.0, 10.0)
- m.drawmeridians(meridians, labels=[0, 0, 0, 1], fontsize=10)
+ meridians = np.arange(-180.,180.,10.)
+ m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10)
ny = data.shape[0]
nx = data.shape[1]
# draw filled contours.
- colors = [plt.cm.jet(x) for x in np.linspace(0, 1, len(clevs))]
- cs = m.contourf(x, y, data, clevs, colors=colors)
+ colors = [ plt.cm.jet(x) for x in np.linspace(0, 1, len(clevs)) ]
+ cs = m.contourf(x,y,data,clevs,colors=colors)
# add colorbar.
- cbar = m.colorbar(
- cs, location="bottom", pad="10%", format=matplotlib.ticker.FuncFormatter(fmt)
- )
- # cbar.set_label('Bq/m2')
+ cbar = m.colorbar(cs,location='bottom',pad="10%", format=matplotlib.ticker.FuncFormatter(fmt))
+ #cbar.set_label('Bq/m2')
# add title
ax.set_title(title)
-fig, axi = plt.subplots(4, 3, sharex=True, sharey=True, figsize=(14, 15.5))
-for ix, radius in enumerate([5, 10, 20]):
- for iy, dens in enumerate([19, 11.6, 8.1, 3.3]):
- nc = NetCDFFile(
- "/lustre/storeB/project/fou/kl/cerad/Projects/2017_KolaRework/Runs/run_{}_{}/snap.nc".format(
- radius, dens
- )
- )
+fig, axi = plt.subplots(4,3,sharex=True, sharey=True, figsize=(14, 15.5))
+
+for ix, radius in enumerate([5,10,20]):
+ for iy, dens in enumerate([19,11.6,8.1,3.3]):
+ nc = NetCDFFile('/lustre/storeB/project/fou/kl/cerad/Projects/2017_KolaRework/Runs/run_{}_{}/snap.nc'.format(radius,dens))
# data from http://water.weather.gov/precip/
- var1 = nc.variables["Cs137_acc_wet_deposition"]
- var2 = nc.variables["Cs137_acc_dry_deposition"]
- data = var1[24, :, :] + var2[24, :, :]
+ var1 = nc.variables['Cs137_acc_wet_deposition']
+ var2 = nc.variables['Cs137_acc_dry_deposition']
+ data = var1[24,:,:] + var2[24,:,:]
- lons = nc.variables["longitude"][:]
- lats = nc.variables["latitude"][:]
+ lons = nc.variables['longitude'][:]
+ lats = nc.variables['latitude'][:]
- plotMap(
- data,
- title="{}µm {}g/cm3".format(2 * radius, dens),
- ax=axi[iy, ix],
- bb={"south": 62, "north": 77, "west": 12, "east": 33},
- clevs=[100000, 300000, 1000000, 3000000, 10000000, 100000000],
- lons=lons,
- lats=lats,
- )
+ plotMap(data,
+ title="{}µm {}g/cm3".format(2*radius, dens),
+ ax=axi[iy,ix],
+ bb={"south":62, "north":77, "west":12, "east":33},
+ clevs=[100000, 300000, 1000000, 3000000, 10000000, 100000000],
+ lons=lons, lats=lats)
fig.subplots_adjust(hspace=0.01, wspace=0.2)
fig.savefig("kolaFlightMaps.png")
-# plt.show()
+#plt.show()
+