diff --git a/.gitignore b/.gitignore
index 869c78107c..2935804ac4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,7 +36,6 @@ fix/gsi
fix/lut
fix/mom6
fix/orog
-fix/reg2grb2
fix/sfc_climo
fix/ugwd
fix/verif
@@ -99,6 +98,9 @@ parm/post/postxconfig-NT-GFS-WAFS.txt
parm/post/postxconfig-NT-GFS.txt
parm/post/postxconfig-NT-gefs-aerosol.txt
parm/post/postxconfig-NT-gefs-chem.txt
+parm/post/ocean.csv
+parm/post/ice.csv
+parm/post/ocnicepost.nml.jinja2
parm/ufs/noahmptable.tbl
parm/ufs/model_configure.IN
parm/ufs/MOM_input_*.IN
@@ -137,7 +139,6 @@ sorc/radmon_bcor.fd
sorc/radmon_time.fd
sorc/rdbfmsua.fd
sorc/recentersigp.fd
-sorc/reg2grb2.fd
sorc/supvit.fd
sorc/syndat_getjtbul.fd
sorc/syndat_maksynrc.fd
@@ -147,6 +148,7 @@ sorc/tocsbufr.fd
sorc/upp.fd
sorc/vint.fd
sorc/webtitle.fd
+sorc/ocnicepost.fd
# Ignore scripts from externals
#------------------------------
diff --git a/Jenkinsfile b/Jenkinsfile
index 9f3688ea6c..52175c2b50 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,15 +1,15 @@
def Machine = 'none'
def machine = 'none'
def HOME = 'none'
-def localworkspace = 'none'
-def commonworkspace = 'none'
+def caseList = ''
+def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES']
pipeline {
agent { label 'built-in' }
options {
skipDefaultCheckout()
- buildDiscarder(logRotator(numToKeepStr: '2'))
+ parallelsAlwaysFailFast()
}
stages { // This initial stage is used to get the Machine name from the GitHub labels on the PR
@@ -20,7 +20,6 @@ pipeline {
agent { label 'built-in' }
steps {
script {
- localworkspace = env.WORKSPACE
machine = 'none'
for (label in pullRequest.labels) {
echo "Label: ${label}"
@@ -42,17 +41,19 @@ pipeline {
agent { label "${machine}-emc" }
steps {
script {
- properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
- HOME = "${WORKSPACE}/TESTDIR"
- commonworkspace = "${WORKSPACE}"
- sh(script: "mkdir -p ${HOME}/RUNTESTS", returnStatus: true)
- pullRequest.addLabel("CI-${Machine}-Building")
- if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Ready") }) {
- pullRequest.removeLabel("CI-${Machine}-Ready")
+ ws("${custom_workspace[machine]}/${env.CHANGE_ID}") {
+ properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
+ HOME = "${WORKSPACE}"
+ sh(script: "mkdir -p ${HOME}/RUNTESTS;rm -Rf ${HOME}/RUNTESTS/error.logs")
+ pullRequest.addLabel("CI-${Machine}-Building")
+ if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Ready") }) {
+ pullRequest.removeLabel("CI-${Machine}-Ready")
+ }
+ }
+ pullRequest.comment("Building and running on ${Machine} in directory ${HOME}")
}
}
}
- }
stage('Build System') {
matrix {
@@ -71,35 +72,42 @@ pipeline {
steps {
script {
def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to build the system on per system basis under the common workspace HOME
- sh(script: "mkdir -p ${HOMEgfs}", returnStatus: true)
+ sh(script: "mkdir -p ${HOMEgfs}")
ws(HOMEgfs) {
- env.MACHINE_ID = machine // MACHINE_ID is used in the build scripts to determine the machine and is added to the shell environment
if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs
sh(script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore
- ws(commonworkspace) { pullRequest.comment("Cloned PR already built (or build skipped) on ${machine} in directory ${HOMEgfs}") }
+ pullRequest.comment("Cloned PR already built (or build skipped) on ${machine} in directory ${HOMEgfs}
Still doing a checkout to get the latest changes")
+ checkout scm
+ dir('sorc') {
+ sh(script: './link_workflow.sh')
+ }
} else {
checkout scm
- sh(script: 'source workflow/gw_setup.sh;which git;git --version;git submodule update --init --recursive', returnStatus: true)
def builds_file = readYaml file: 'ci/cases/yamls/build.yaml'
def build_args_list = builds_file['builds']
def build_args = build_args_list[system].join(' ').trim().replaceAll('null', '')
dir("${HOMEgfs}/sorc") {
- sh(script: "${build_args}", returnStatus: true)
- sh(script: './link_workflow.sh', returnStatus: true)
- sh(script: "echo ${HOMEgfs} > BUILT_semaphor", returnStatus: true)
+ sh(script: "${build_args}")
+ sh(script: './link_workflow.sh')
+ sh(script: "echo ${HOMEgfs} > BUILT_semaphor")
}
}
- if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Building") }) {
- pullRequest.removeLabel("CI-${Machine}-Building")
- }
- pullRequest.addLabel("CI-${Machine}-Running")
- }
+ if (env.CHANGE_ID && system == 'gfs') {
+ if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Building") }) {
+ pullRequest.removeLabel("CI-${Machine}-Building")
+ }
+ pullRequest.addLabel("CI-${Machine}-Running")
+ }
+ if (system == 'gfs') {
+ caseList = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
+ }
+ }
+ }
}
}
}
}
}
-}
stage('Run Tests') {
matrix {
@@ -108,23 +116,31 @@ pipeline {
axis {
name 'Case'
// TODO add dynamic list of cases from env vars (needs addtional plugins)
- values 'C48_ATM', 'C48_S2SWA_gefs', 'C48_S2SW', 'C96_atm3DVar', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atmsnowDA'
+ values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmsnowDA'
}
}
stages {
- stage('Create Experiment') {
+
+ stage('Create Experiments') {
+ when {
+ expression { return caseList.contains(Case) }
+ }
steps {
script {
- sh(script: "sed -n '/{.*}/!p' ${HOME}/gfs/ci/cases/pr/${Case}.yaml > ${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp", returnStatus: true)
+ sh(script: "sed -n '/{.*}/!p' ${HOME}/gfs/ci/cases/pr/${Case}.yaml > ${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp")
def yaml_case = readYaml file: "${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp"
system = yaml_case.experiment.system
def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to populate the XML on per system basis
env.RUNTESTS = "${HOME}/RUNTESTS"
- sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml", returnStatus: true)
+ sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml")
}
}
}
+
stage('Run Experiments') {
+ when {
+ expression { return caseList.contains(Case) }
+ }
steps {
script {
HOMEgfs = "${HOME}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
@@ -132,16 +148,28 @@ pipeline {
pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
pullRequest.comment("**Running** experiment: ${Case} on ${Machine}
With the experiment in directory:
`${HOME}/RUNTESTS/${pslot}`")
try {
- sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}", returnStatus: true)
+ sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}")
} catch (Exception e) {
pullRequest.comment("**FAILURE** running experiment: ${Case} on ${Machine}")
+ sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_all_batch_jobs ${HOME}/RUNTESTS")
+ ws(HOME) {
+ if (fileExists('RUNTESTS/error.logs')) {
+ def fileContent = readFile 'RUNTESTS/error.logs'
+ def lines = fileContent.readLines()
+ for (line in lines) {
+ echo "archiving: ${line}"
+ archiveArtifacts artifacts: "${line}", fingerprint: true
+ }
+ }
+ }
error("Failed to run experiments ${Case} on ${Machine}")
}
- pullRequest.comment("**SUCCESS** running experiment: ${Case} on ${Machine}")
}
+ pullRequest.comment("**SUCCESS** running experiment: ${Case} on ${Machine}")
}
}
}
+
}
}
}
@@ -175,14 +203,6 @@ pipeline {
def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
pullRequest.comment("**CI FAILED** ${Machine} at ${timestamp}
Built and ran in directory `${HOME}`")
}
- if (fileExists('${HOME}/RUNTESTS/ci.log')) {
- def fileContent = readFile '${HOME}/RUNTESTS/ci.log'
- fileContent.eachLine { line ->
- if (line.contains('.log')) {
- archiveArtifacts artifacts: "${line}", fingerprint: true
- }
- }
- }
}
}
}
diff --git a/ci/cases/pr/C48C48_ufs_hybatmDA.yaml b/ci/cases/pr/C48C48_ufs_hybatmDA.yaml
new file mode 100644
index 0000000000..7d3644b1af
--- /dev/null
+++ b/ci/cases/pr/C48C48_ufs_hybatmDA.yaml
@@ -0,0 +1,22 @@
+experiment:
+ system: gfs
+ mode: cycled
+
+arguments:
+ pslot: {{ 'pslot' | getenv }}
+ app: ATM
+ resdetatmos: 48
+ resensatmos: 48
+ comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+ expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+ idate: 2021032312
+ edate: 2021032400
+ nens: 2
+ gfs_cyc: 1
+ start: warm
+ yaml: {{ HOMEgfs }}/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
+
+skip_ci_on_hosts:
+ - hera
+ - orion
+ - hercules
diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
index b972d3a445..d9156e38f3 100644
--- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
+++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
@@ -19,4 +19,5 @@ arguments:
skip_ci_on_hosts:
- orion
+ - hera
- hercules
diff --git a/ci/cases/yamls/atmsnowDA_defaults_ci.yaml b/ci/cases/yamls/atmsnowDA_defaults_ci.yaml
index f805902931..417525742e 100644
--- a/ci/cases/yamls/atmsnowDA_defaults_ci.yaml
+++ b/ci/cases/yamls/atmsnowDA_defaults_ci.yaml
@@ -2,5 +2,5 @@ defaults:
!INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
base:
DOIAU: "NO"
- DO_JEDILANDDA: "YES"
+ DO_JEDISNOWDA: "YES"
ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }}
diff --git a/ci/cases/yamls/build.yaml b/ci/cases/yamls/build.yaml
index 5398fa1889..2ff008d372 100644
--- a/ci/cases/yamls/build.yaml
+++ b/ci/cases/yamls/build.yaml
@@ -1,3 +1,3 @@
builds:
- gefs: './build_all.sh'
- - gfs: './build_all.sh -gu'
\ No newline at end of file
+ - gfs: './build_all.sh -wgu'
diff --git a/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
new file mode 100644
index 0000000000..126c0f461a
--- /dev/null
+++ b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
@@ -0,0 +1,20 @@
+defaults:
+ !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+base:
+ DOIAU: "NO"
+ DO_JEDIATMVAR: "YES"
+ DO_JEDIATMENS: "YES"
+ ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }}
+atmanl:
+ LAYOUT_X_ATMANL: 1
+ LAYOUT_Y_ATMANL: 1
+atmensanl:
+ LAYOUT_X_ATMENSANL: 1
+ LAYOUT_Y_ATMENSANL: 1
+esfc:
+ DONST: "NO"
+nsst:
+ NST_MODEL: "1"
+sfcanl:
+ DONST: "NO"
+
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index cda2d4e9f2..4ff7eefd26 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -8,7 +8,7 @@ set -eux
# to run from within a cron job in the CI Managers account
#####################################################################################
-ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
scriptname=$(basename "${BASH_SOURCE[0]}")
echo "Begin ${scriptname} at $(date -u)" || true
export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
@@ -20,11 +20,11 @@ REPO_URL="https://github.com/NOAA-EMC/global-workflow.git"
# Set up runtime environment varibles for accounts on supproted machines
#########################################################################
-source "${ROOT_DIR}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/detect_machine.sh"
case ${MACHINE_ID} in
hera | orion | hercules)
echo "Running Automated Testing on ${MACHINE_ID}"
- source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
+ source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
;;
*)
echo "Unsupported platform. Exiting with error."
@@ -32,9 +32,10 @@ case ${MACHINE_ID} in
;;
esac
set +x
-source "${ROOT_DIR}/ush/module-setup.sh"
-source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh"
-module use "${ROOT_DIR}/modulefiles"
+export HOMEgfs
+source "${HOMEgfs}/ush/module-setup.sh"
+source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh"
+module use "${HOMEgfs}/modulefiles"
module load "module_gwsetup.${MACHINE_ID}"
module list
set -x
@@ -57,7 +58,7 @@ pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
pr_list=""
if [[ -f "${pr_list_dbfile}" ]]; then
- pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display | grep -v Failed | grep Running | awk '{print $1}') || true
+ pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display | grep -v Failed | grep Running | awk '{print $1}') || true
fi
if [[ -z "${pr_list+x}" ]]; then
echo "no PRs open and ready to run cases on .. exiting"
@@ -89,13 +90,13 @@ for pr in ${pr_list}; do
sed -i "1 i\`\`\`" "${output_ci}"
sed -i "1 i\All CI Test Cases Passed on ${MACHINE_ID^}:" "${output_ci}"
"${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
- "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+ "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
# Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms
weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}" --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true
if [[ -n "${weekly_labels}" ]]; then
- num_platforms=$(find "${ROOT_DIR}/ci/platforms" -type f -name "config.*" | wc -l)
+ num_platforms=$(find "${HOMEgfs}/ci/platforms" -type f -name "config.*" | wc -l)
passed=0
- for platforms in "${ROOT_DIR}"/ci/platforms/config.*; do
+ for platforms in "${HOMEgfs}"/ci/platforms/config.*; do
machine=$(basename "${platforms}" | cut -d. -f2)
if [[ "${weekly_labels}" == *"CI-${machine^}-Passed"* ]]; then
((passed=passed+1))
@@ -139,7 +140,7 @@ for pr in ${pr_list}; do
} >> "${output_ci}"
sed -i "1 i\`\`\`" "${output_ci}"
"${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
- "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+ "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
for kill_cases in "${pr_dir}/RUNTESTS/"*; do
pslot=$(basename "${kill_cases}")
cancel_slurm_jobs "${pslot}"
diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh
index 798c98bf50..989afabb80 100755
--- a/ci/scripts/clone-build_ci.sh
+++ b/ci/scripts/clone-build_ci.sh
@@ -74,7 +74,7 @@ set +e
source "${HOMEgfs}/ush/module-setup.sh"
export BUILD_JOBS=8
rm -rf log.build
-./build_all.sh -gu >> log.build 2>&1
+./build_all.sh -guw >> log.build 2>&1
build_status=$?
DATE=$(date +'%D %r')
diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh
index 5fc13ea524..f37b5e3f2e 100755
--- a/ci/scripts/driver.sh
+++ b/ci/scripts/driver.sh
@@ -47,12 +47,15 @@ esac
######################################################
# setup runtime env for correct python install and git
######################################################
+HOMEgfs=${ROOT_DIR}
+export HOMEgfs
set +x
source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh"
source "${ROOT_DIR}/ush/module-setup.sh"
module use "${ROOT_DIR}/modulefiles"
module load "module_gwsetup.${MACHINE_ID}"
set -x
+unset HOMEgfs
############################################################
# query repo and get list of open PRs with tags {machine}-CI
diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh
index f98f434462..8e1e927050 100755
--- a/ci/scripts/run-check_ci.sh
+++ b/ci/scripts/run-check_ci.sh
@@ -25,6 +25,7 @@ pslot=${2:-${pslot:-?}} # Name of the experiment being tested by this scr
# TODO: Make this configurable (for now all scripts run from gfs for CI at runtime)
HOMEgfs="${TEST_DIR}/gfs"
RUNTESTS="${TEST_DIR}/RUNTESTS"
+run_check_logfile="${RUNTESTS}/ci-run_check.log"
# Source modules and setup logging
echo "Source modules."
@@ -77,15 +78,16 @@ while true; do
{
echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true
echo "Experiment ${pslot} Terminated: *FAILED*"
- } >> "${RUNTESTS}/ci.log"
-
+ } | tee -a "${run_check_logfile}"
error_logs=$(rocotostat -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs rocotocheck -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
{
echo "Error logs:"
echo "${error_logs}"
- } >> "${RUNTESTS}/ci.log"
- sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
- sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "${pr}\/RUNTESTS" | awk '{print $1}' | xargs scancel || true
+ } | tee -a "${run_check_logfile}"
+ # rm -f "${RUNTESTS}/error.logs"
+ for log in ${error_logs}; do
+ echo "RUNTESTS${log#*RUNTESTS}" >> "${RUNTESTS}/error.logs"
+ done
rc=1
break
fi
@@ -95,8 +97,7 @@ while true; do
echo "Experiment ${pslot} Completed at $(date)" || true
echo "with ${num_succeeded} successfully completed jobs" || true
echo "Experiment ${pslot} Completed: *SUCCESS*"
- } >> "${RUNTESTS}/ci.log"
- sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
+ } | tee -a "${run_check_logfile}"
rc=0
break
fi
@@ -107,3 +108,4 @@ while true; do
done
exit "${rc}"
+
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
index 4a390a23f2..f50a4465d0 100755
--- a/ci/scripts/run_ci.sh
+++ b/ci/scripts/run_ci.sh
@@ -9,7 +9,7 @@ set -eux
# Abstract TODO
#####################################################################################
-ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
scriptname=$(basename "${BASH_SOURCE[0]}")
echo "Begin ${scriptname} at $(date -u)" || true
export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
@@ -18,11 +18,11 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
# Set up runtime environment varibles for accounts on supproted machines
#########################################################################
-source "${ROOT_DIR}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/detect_machine.sh"
case ${MACHINE_ID} in
hera | orion | hercules)
echo "Running Automated Testing on ${MACHINE_ID}"
- source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
+ source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
;;
*)
echo "Unsupported platform. Exiting with error."
@@ -30,8 +30,9 @@ case ${MACHINE_ID} in
;;
esac
set +x
-source "${ROOT_DIR}/ush/module-setup.sh"
-module use "${ROOT_DIR}/modulefiles"
+export HOMEgfs
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
module load "module_gwsetup.${MACHINE_ID}"
module list
set -eux
@@ -47,7 +48,7 @@ pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
pr_list=""
if [[ -f "${pr_list_dbfile}" ]]; then
- pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true
+ pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true
fi
if [[ -z "${pr_list}" ]]; then
echo "no open and built PRs that are ready for the cases to advance with rocotorun .. exiting"
diff --git a/ci/scripts/utils/ci_utils.sh b/ci/scripts/utils/ci_utils.sh
index 6f2426c388..ce2e039307 100755
--- a/ci/scripts/utils/ci_utils.sh
+++ b/ci/scripts/utils/ci_utils.sh
@@ -102,6 +102,14 @@ function get_pslot () {
}
+function cancel_all_batch_jobs () {
+ local RUNTESTS="${1}"
+ pslot_list=$(get_pslot_list "${RUNTESTS}")
+ for pslot in ${pslot_list}; do
+ cancel_batch_jobs "${pslot}"
+ done
+}
+
function create_experiment () {
local yaml_config="${1}"
diff --git a/ci/scripts/utils/get_host_case_list.py b/ci/scripts/utils/get_host_case_list.py
new file mode 100755
index 0000000000..eb10f29f05
--- /dev/null
+++ b/ci/scripts/utils/get_host_case_list.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+import os
+from os.path import basename, splitext
+import sys
+import glob
+from wxflow import parse_j2yaml
+from wxflow import AttrDict
+
+_here = os.path.dirname(__file__)
+_top = os.path.abspath(os.path.join(os.path.abspath(_here), '../../..'))
+
+if __name__ == '__main__':
+
+ if len(sys.argv) < 2:
+        print('Usage: get_host_case_list.py <host>')
+ sys.exit(1)
+
+ host = sys.argv[1]
+
+ case_list = []
+ HOMEgfs = _top
+ data = AttrDict(HOMEgfs=_top)
+ data.update(os.environ)
+
+ case_files = glob.glob(f'{HOMEgfs}/ci/cases/pr/*.yaml')
+ for case_yaml in case_files:
+ case_conf = parse_j2yaml(path=case_yaml, data=data)
+ if 'skip_ci_on_hosts' in case_conf:
+ if host.lower() in [machine.lower() for machine in case_conf.skip_ci_on_hosts]:
+ continue
+ case_list.append(splitext(basename(case_yaml))[0])
+ print(' '.join(case_list))
diff --git a/ci/scripts/utils/wxflow b/ci/scripts/utils/wxflow
new file mode 120000
index 0000000000..54d0558aba
--- /dev/null
+++ b/ci/scripts/utils/wxflow
@@ -0,0 +1 @@
+../../../sorc/wxflow/src/wxflow
\ No newline at end of file
diff --git a/docs/source/clone.rst b/docs/source/clone.rst
index bad3f0e9f6..4f47eb230f 100644
--- a/docs/source/clone.rst
+++ b/docs/source/clone.rst
@@ -39,6 +39,13 @@ For coupled cycling (include new UFSDA) use the `-gu` options during build:
./build_all.sh -gu
+For building with PDLIB for the wave model, use the `-w` options during build:
+
+::
+
+ ./build_all.sh -w
+
+
Build workflow components and link workflow artifacts such as executables, etc.
::
diff --git a/env/AWSPW.env b/env/AWSPW.env
index ea5002ecb9..2dbba67eb3 100755
--- a/env/AWSPW.env
+++ b/env/AWSPW.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanlrun"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
diff --git a/env/CONTAINER.env b/env/CONTAINER.env
index b1f55a4c98..bc2d64b4ce 100755
--- a/env/CONTAINER.env
+++ b/env/CONTAINER.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
diff --git a/env/HERA.env b/env/HERA.env
index 7c83a9ea5e..2fa2dd330e 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
@@ -42,7 +42,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
export sys_tp="HERA"
export launcher_PREP="srun"
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
export APRUN_CALCFIMS="${launcher} -n 1"
@@ -79,13 +79,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "snowanl" ]]; then
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}"
export APRUN_APPLY_INCR="${launcher} -n 6"
@@ -205,6 +205,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation on Hera
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/HERCULES.env b/env/HERCULES.env
index ebfa51398b..7d2aa5f8d0 100755
--- a/env/HERCULES.env
+++ b/env/HERCULES.env
@@ -41,7 +41,7 @@ case ${step} in
export sys_tp="HERCULES"
export launcher_PREP="srun"
;;
- "preplandobs")
+ "prepsnowobs")
export APRUN_CALCFIMS="${launcher} -n 1"
;;
@@ -79,13 +79,13 @@ case ${step} in
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
;;
- "landanl")
+ "snowanl")
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}"
export APRUN_APPLY_INCR="${launcher} -n 6"
;;
@@ -207,10 +207,20 @@ case ${step} in
[[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}"
;;
+
"atmos_products")
export USE_CFP="YES" # Use MPMD for downstream product generation
;;
+
+"oceanice_products")
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+;;
+
"ecen")
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/JET.env b/env/JET.env
index eada0b1c70..df6666d8dc 100755
--- a/env/JET.env
+++ b/env/JET.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
@@ -33,7 +33,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
export sys_tp="JET"
export launcher_PREP="srun"
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
export APRUN_CALCFIMS="${launcher} -n 1"
@@ -70,13 +70,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "snowanl" ]]; then
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}"
export APRUN_APPLY_INCR="${launcher} -n 6"
@@ -190,6 +190,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/ORION.env b/env/ORION.env
index c5e94cc559..692fa8ab66 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
@@ -40,7 +40,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
export sys_tp="ORION"
export launcher_PREP="srun"
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
export APRUN_CALCFIMS="${launcher} -n 1"
@@ -78,13 +78,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "snowanl" ]]; then
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl} --cpus-per-task=${NTHREADS_SNOWANL}"
export APRUN_APPLY_INCR="${launcher} -n 6"
@@ -209,6 +209,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/S4.env b/env/S4.env
index b103e865d3..ab564eb974 100755
--- a/env/S4.env
+++ b/env/S4.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen efcs epos"
echo "postsnd awips gempak"
@@ -33,7 +33,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
export sys_tp="S4"
export launcher_PREP="srun"
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
export APRUN_CALCFIMS="${launcher} -n 1"
@@ -70,13 +70,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "snowanl" ]]; then
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}"
export APRUN_APPLY_INCR="${launcher} -n 6"
@@ -177,6 +177,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index 307ad71c43..ebb55a4077 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
echo "Must specify an input argument to set runtime environment variables!"
echo "argument can be any one of the following:"
- echo "atmanlrun atmensanlrun aeroanlrun landanl"
+ echo "atmanlrun atmensanlrun aeroanlrun snowanl"
echo "anal sfcanl fcst post metp"
echo "eobs eupd ecen esfc efcs epos"
echo "postsnd awips gempak"
@@ -27,7 +27,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
export sys_tp="wcoss2"
export launcher_PREP="mpiexec"
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
export APRUN_CALCFIMS="${launcher} -n 1"
@@ -64,13 +64,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then
[[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "snowanl" ]]; then
- nth_max=$((npe_node_max / npe_node_landanl))
+ nth_max=$((npe_node_max / npe_node_snowanl))
- export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
- [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
- export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+ export NTHREADS_SNOWANL=${nth_snowanl:-${nth_max}}
+ [[ ${NTHREADS_SNOWANL} -gt ${nth_max} ]] && export NTHREADS_SNOWANL=${nth_max}
+ export APRUN_SNOWANL="${launcher} -n ${npe_snowanl}"
export APRUN_APPLY_INCR="${launcher} -n 6"
@@ -176,6 +176,8 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
if [[ "${step}" = "fcst" ]]; then
export OMP_PLACES=cores
export OMP_STACKSIZE=2048M
+ export MPICH_MPIIO_HINTS="*:romio_cb_write=disable"
+ export FI_OFI_RXM_SAR_LIMIT=3145728
elif [[ "${step}" = "efcs" ]]; then
export MPICH_MPIIO_HINTS="*:romio_cb_write=disable"
export FI_OFI_RXM_SAR_LIMIT=3145728
@@ -193,6 +195,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 -ppn ${npe_node_oceanice_products} --cpu-bind depth --depth ${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/gempak/ush/gdas_ecmwf_meta_ver.sh b/gempak/ush/gdas_ecmwf_meta_ver.sh
index e4fffd9c8a..5d519c7cc5 100755
--- a/gempak/ush/gdas_ecmwf_meta_ver.sh
+++ b/gempak/ush/gdas_ecmwf_meta_ver.sh
@@ -30,7 +30,7 @@ PDY2=$(echo ${PDY} | cut -c3-)
#
# Copy in datatype table to define gdfile type
#
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File datatype.tbl does not exist."
diff --git a/gempak/ush/gdas_meta_loop.sh b/gempak/ush/gdas_meta_loop.sh
index cd0d9b781b..927958a0ad 100755
--- a/gempak/ush/gdas_meta_loop.sh
+++ b/gempak/ush/gdas_meta_loop.sh
@@ -33,7 +33,7 @@ export pgm=gdplot2_nc;. prep_step; startmsg
#
# Copy in datatype table to define gdfile type
#
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File datatype.tbl does not exist."
diff --git a/gempak/ush/gdas_ukmet_meta_ver.sh b/gempak/ush/gdas_ukmet_meta_ver.sh
index 845fa1cc6b..c7267ed09c 100755
--- a/gempak/ush/gdas_ukmet_meta_ver.sh
+++ b/gempak/ush/gdas_ukmet_meta_ver.sh
@@ -29,7 +29,7 @@ PDY2=$(echo ${PDY} | cut -c3-)
#
# Copy in datatype table to define gdfile type
#
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
#
# DEFINE 1 CYCLE AGO
diff --git a/gempak/ush/gempak_gdas_f000_gif.sh b/gempak/ush/gempak_gdas_f000_gif.sh
index cdf7659155..3e25d4090f 100755
--- a/gempak/ush/gempak_gdas_f000_gif.sh
+++ b/gempak/ush/gempak_gdas_f000_gif.sh
@@ -13,9 +13,6 @@
#
#########################################################################
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
set -x
MAPAREA="normal"
@@ -25,7 +22,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
#################################################################
# NORTHERN HEMISPHERE ANALYSIS CHARTS #
@@ -488,8 +485,4 @@ if [ $SENDCOM = YES ]; then
fi
-
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gempak_gfs_f00_gif.sh b/gempak/ush/gempak_gfs_f00_gif.sh
index 2a7cca5c9f..ad16958be2 100755
--- a/gempak/ush/gempak_gfs_f00_gif.sh
+++ b/gempak/ush/gempak_gfs_f00_gif.sh
@@ -15,9 +15,6 @@
#
#########################################################################
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
set -x
MAPAREA="normal"
@@ -26,7 +23,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
#################################################################
# ANALYSIS CHARTS #
@@ -596,7 +593,4 @@ if [ $SENDCOM = YES ]; then
${USHgfs}/make_tif.sh
fi
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gempak_gfs_f12_gif.sh b/gempak/ush/gempak_gfs_f12_gif.sh
index 611252a2e2..0150affac7 100755
--- a/gempak/ush/gempak_gfs_f12_gif.sh
+++ b/gempak/ush/gempak_gfs_f12_gif.sh
@@ -15,9 +15,6 @@
#
#########################################################################
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
set -x
MAPAREA="normal"
@@ -26,7 +23,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
##########################################################
# 12HR FORECAST CHARTS #
@@ -207,7 +204,4 @@ if [ $SENDCOM = YES ]; then
fi
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gempak_gfs_f24_gif.sh b/gempak/ush/gempak_gfs_f24_gif.sh
index 53670a29bd..cad24de9f6 100755
--- a/gempak/ush/gempak_gfs_f24_gif.sh
+++ b/gempak/ush/gempak_gfs_f24_gif.sh
@@ -16,13 +16,6 @@
#
#########################################################################
-
-
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
-
-
set -x
@@ -32,7 +25,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
@@ -224,8 +217,4 @@ if [ $SENDCOM = YES ]; then
fi
-
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gempak_gfs_f36_gif.sh b/gempak/ush/gempak_gfs_f36_gif.sh
index e1999090c0..60644e2fa3 100755
--- a/gempak/ush/gempak_gfs_f36_gif.sh
+++ b/gempak/ush/gempak_gfs_f36_gif.sh
@@ -16,12 +16,6 @@
#
#########################################################################
-
-
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
-
set -x
@@ -31,7 +25,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
@@ -223,9 +217,4 @@ if [ $SENDCOM = YES ]; then
fi
-
-
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gempak_gfs_f48_gif.sh b/gempak/ush/gempak_gfs_f48_gif.sh
index 1e0ba532fd..b63f644ee6 100755
--- a/gempak/ush/gempak_gfs_f48_gif.sh
+++ b/gempak/ush/gempak_gfs_f48_gif.sh
@@ -16,12 +16,6 @@
#
#########################################################################
-
-
- msg=" Make GEMPAK GIFS utility"
- postmsg "$jlogfile" "$msg"
-
-
set -x
@@ -31,7 +25,7 @@
pixels="1728;1472"
- cp $FIXgempak/coltbl.spc coltbl.xwp
+ cp ${HOMEgfs}/gempak/fix/coltbl.spc coltbl.xwp
@@ -223,9 +217,4 @@ if [ $SENDCOM = YES ]; then
fi
-
-
- msg=" GEMPAK_GIF ${fhr} hour completed normally"
- postmsg "$jlogfile" "$msg"
-
exit
diff --git a/gempak/ush/gfs_meta_ak.sh b/gempak/ush/gfs_meta_ak.sh
index c258b7e83a..03db6fa088 100755
--- a/gempak/ush/gfs_meta_ak.sh
+++ b/gempak/ush/gfs_meta_ak.sh
@@ -24,7 +24,7 @@ set -xa
rm -rf $DATA/ak
mkdir -p -m 775 $DATA/ak
cd $DATA/ak
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
device="nc | gfs.meta.ak"
PDY2=$(echo $PDY | cut -c3-)
diff --git a/gempak/ush/gfs_meta_bwx.sh b/gempak/ush/gfs_meta_bwx.sh
index f5b4e1d944..af7f43e3de 100755
--- a/gempak/ush/gfs_meta_bwx.sh
+++ b/gempak/ush/gfs_meta_bwx.sh
@@ -24,7 +24,7 @@ set -x
export PS4='BWX:$SECONDS + '
mkdir -p -m 775 $DATA/BWX
cd $DATA/BWX
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_comp.sh b/gempak/ush/gfs_meta_comp.sh
index 9bd27c5736..e033d11bd8 100755
--- a/gempak/ush/gfs_meta_comp.sh
+++ b/gempak/ush/gfs_meta_comp.sh
@@ -29,7 +29,7 @@ export PS4='COMP:$SECONDS + '
rm -Rf $DATA/COMP $DATA/GEMPAK_META_COMP
mkdir -p -m 775 $DATA/COMP $DATA/GEMPAK_META_COMP
cd $DATA/COMP
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export COMPONENT=${COMPONENT:-atmos}
diff --git a/gempak/ush/gfs_meta_crb.sh b/gempak/ush/gfs_meta_crb.sh
index 82fa7795e8..181afab6c1 100755
--- a/gempak/ush/gfs_meta_crb.sh
+++ b/gempak/ush/gfs_meta_crb.sh
@@ -17,7 +17,7 @@ set -x
export PS4='crb:$SECONDS + '
mkdir -p -m 775 $DATA/crb
cd $DATA/crb
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
#
mdl=gfs
MDL=GFS
diff --git a/gempak/ush/gfs_meta_hi.sh b/gempak/ush/gfs_meta_hi.sh
index 2b47474e12..6167855ae2 100755
--- a/gempak/ush/gfs_meta_hi.sh
+++ b/gempak/ush/gfs_meta_hi.sh
@@ -19,7 +19,7 @@
set -xa
mkdir -p -m 775 $DATA/mrfhi
cd $DATA/mrfhi
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
device="nc | mrfhi.meta"
diff --git a/gempak/ush/gfs_meta_hur.sh b/gempak/ush/gfs_meta_hur.sh
index aed25d6d78..04224ee39b 100755
--- a/gempak/ush/gfs_meta_hur.sh
+++ b/gempak/ush/gfs_meta_hur.sh
@@ -25,7 +25,7 @@ set -x
export PS4='hur:$SECONDS + '
mkdir -p -m 775 $DATA/hur
cd $DATA/hur
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL=GFS
diff --git a/gempak/ush/gfs_meta_mar_atl.sh b/gempak/ush/gfs_meta_mar_atl.sh
index c8db3e59d4..b76a25f247 100755
--- a/gempak/ush/gfs_meta_mar_atl.sh
+++ b/gempak/ush/gfs_meta_mar_atl.sh
@@ -12,7 +12,7 @@ set -x
export PS4='MAR_ATL:$SECONDS + '
mkdir -p -m 775 $DATA/MAR_ATL
cd $DATA/MAR_ATL
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_mar_comp.sh b/gempak/ush/gfs_meta_mar_comp.sh
index a55fa3c642..0c179b6270 100755
--- a/gempak/ush/gfs_meta_mar_comp.sh
+++ b/gempak/ush/gfs_meta_mar_comp.sh
@@ -18,7 +18,7 @@ rm -Rf $DATA/GEMPAK_META_MAR
mkdir -p -m 775 $DATA/GEMPAK_META_MAR $DATA/MAR_COMP
cd $DATA/MAR_COMP
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export COMPONENT=${COMPONENT:-atmos}
diff --git a/gempak/ush/gfs_meta_mar_pac.sh b/gempak/ush/gfs_meta_mar_pac.sh
index b44f60a2f7..06165e837c 100755
--- a/gempak/ush/gfs_meta_mar_pac.sh
+++ b/gempak/ush/gfs_meta_mar_pac.sh
@@ -12,7 +12,7 @@ set -x
export PS4='MAR_PAC:$SECONDS + '
mkdir -p -m 775 $DATA/MAR_PAC
cd $DATA/MAR_PAC
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_mar_ql.sh b/gempak/ush/gfs_meta_mar_ql.sh
index f1abf3d395..71ee9d2fee 100755
--- a/gempak/ush/gfs_meta_mar_ql.sh
+++ b/gempak/ush/gfs_meta_mar_ql.sh
@@ -12,7 +12,7 @@ set -x
export PS4='MAR_QL_F${fend}:$SECONDS + '
mkdir -p -m 775 $DATA/MAR_QL
cd $DATA/MAR_QL
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_mar_skewt.sh b/gempak/ush/gfs_meta_mar_skewt.sh
index 040e09e932..a49439d6a0 100755
--- a/gempak/ush/gfs_meta_mar_skewt.sh
+++ b/gempak/ush/gfs_meta_mar_skewt.sh
@@ -12,7 +12,7 @@ set -x
export PS4='MAR_SKEWT:$SECONDS + '
mkdir -p -m 775 $DATA/MAR_SKEWT
cd $DATA/MAR_SKEWT
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_mar_ver.sh b/gempak/ush/gfs_meta_mar_ver.sh
index 63ccba00ed..79f4fc7096 100755
--- a/gempak/ush/gfs_meta_mar_ver.sh
+++ b/gempak/ush/gfs_meta_mar_ver.sh
@@ -12,7 +12,7 @@ set -x
export PS4='MAR_VER:$SECONDS + '
mkdir -p -m 775 $DATA/MAR_VER
cd $DATA/MAR_VER
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL="GFS"
diff --git a/gempak/ush/gfs_meta_nhsh.sh b/gempak/ush/gfs_meta_nhsh.sh
index 3e0146270e..6e6981c2df 100755
--- a/gempak/ush/gfs_meta_nhsh.sh
+++ b/gempak/ush/gfs_meta_nhsh.sh
@@ -19,7 +19,7 @@
set -xa
mkdir -p -m 775 $DATA/mrfnhsh
cd $DATA/mrfnhsh
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
PDY2=$(echo $PDY | cut -c3-)
diff --git a/gempak/ush/gfs_meta_opc_na_ver b/gempak/ush/gfs_meta_opc_na_ver
index 8d5f394b3d..3c0798ebc6 100755
--- a/gempak/ush/gfs_meta_opc_na_ver
+++ b/gempak/ush/gfs_meta_opc_na_ver
@@ -12,7 +12,7 @@ set -x
export PS4='OPC_NA_VER_F${fend}:$SECONDS + '
mkdir -p -m 775 $DATA/OPC_NA_VER_F${fend}
cd $DATA/OPC_NA_VER_F${fend}
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export COMPONENT=${COMPONENT:-atmos}
diff --git a/gempak/ush/gfs_meta_opc_np_ver b/gempak/ush/gfs_meta_opc_np_ver
index 5cb9fba3c9..f9f3fc88ab 100755
--- a/gempak/ush/gfs_meta_opc_np_ver
+++ b/gempak/ush/gfs_meta_opc_np_ver
@@ -12,7 +12,7 @@ set -x
export PS4='OPC_NP_VER_F${fend}:$SECONDS + '
mkdir -p -m 775 $DATA/OPC_NP_VER_F${fend}
cd $DATA/OPC_NP_VER_F${fend}
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
#
export COMPONENT=${COMPONENT:-atmos}
diff --git a/gempak/ush/gfs_meta_precip.sh b/gempak/ush/gfs_meta_precip.sh
index cf3db9cbae..a21bcc7d98 100755
--- a/gempak/ush/gfs_meta_precip.sh
+++ b/gempak/ush/gfs_meta_precip.sh
@@ -11,7 +11,7 @@ set -x
export PS4='qpf:$SECONDS + '
mkdir -p -m 775 $DATA/precip
cd $DATA/precip
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
#
# Set model and metafile naming conventions
diff --git a/gempak/ush/gfs_meta_qpf.sh b/gempak/ush/gfs_meta_qpf.sh
index 49ca0d8bd4..85cd7b94f8 100755
--- a/gempak/ush/gfs_meta_qpf.sh
+++ b/gempak/ush/gfs_meta_qpf.sh
@@ -26,7 +26,7 @@ set -x
export PS4='qpf:$SECONDS + '
mkdir -p -m 775 $DATA/qpf
cd $DATA/qpf
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL=GFS
diff --git a/gempak/ush/gfs_meta_sa.sh b/gempak/ush/gfs_meta_sa.sh
index 47984e641d..1bd2ff7b59 100755
--- a/gempak/ush/gfs_meta_sa.sh
+++ b/gempak/ush/gfs_meta_sa.sh
@@ -24,7 +24,7 @@ set -x
export PS4='SA:$SECONDS + '
mkdir -p -m 775 $DATA/SA
cd $DATA/SA
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL=GFS
diff --git a/gempak/ush/gfs_meta_sa2.sh b/gempak/ush/gfs_meta_sa2.sh
index a566031030..de4b7775da 100755
--- a/gempak/ush/gfs_meta_sa2.sh
+++ b/gempak/ush/gfs_meta_sa2.sh
@@ -19,7 +19,7 @@ set -x
echo " start with ukmet_gfs_meta_sa2.sh"
export PS4='SA2:$SECONDS + '
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export COMPONENT=${COMPONENT:-atmos}
diff --git a/gempak/ush/gfs_meta_trop.sh b/gempak/ush/gfs_meta_trop.sh
index d0cc0dbd14..a66cb22c74 100755
--- a/gempak/ush/gfs_meta_trop.sh
+++ b/gempak/ush/gfs_meta_trop.sh
@@ -29,7 +29,7 @@ set -x
export PS4='TROP:$SECONDS + '
mkdir -p -m 775 $DATA/TROP
cd $DATA/TROP
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
mdl=gfs
MDL=GFS
diff --git a/gempak/ush/gfs_meta_us.sh b/gempak/ush/gfs_meta_us.sh
index 7a818c338b..495cbc5839 100755
--- a/gempak/ush/gfs_meta_us.sh
+++ b/gempak/ush/gfs_meta_us.sh
@@ -22,7 +22,7 @@ set -xa
rm -rf $DATA/us
mkdir -p -m 775 $DATA/us
cd $DATA/us
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
device="nc | gfs.meta"
PDY2=$(echo $PDY | cut -c3-)
diff --git a/gempak/ush/gfs_meta_usext.sh b/gempak/ush/gfs_meta_usext.sh
index dc522bb896..3db7c27be4 100755
--- a/gempak/ush/gfs_meta_usext.sh
+++ b/gempak/ush/gfs_meta_usext.sh
@@ -28,12 +28,12 @@
set -xa
mkdir -p -m 775 $DATA/mrfus
cd $DATA/mrfus
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
device="nc | mrf.meta"
-#XXW cp $FIXgempak/model/gfs/ak_sfstns.tbl alaska.tbl
-cp $FIXgempak/ak_sfstns.tbl alaska.tbl
+#XXW cp ${HOMEgfs}/gempak/fix/model/gfs/ak_sfstns.tbl alaska.tbl
+cp ${HOMEgfs}/gempak/fix/ak_sfstns.tbl alaska.tbl
month=$(echo $PDY | cut -c5,6)
if [ $month -ge 5 -a $month -le 9 ] ; then
diff --git a/gempak/ush/gfs_meta_ver.sh b/gempak/ush/gfs_meta_ver.sh
index d63f6bc6df..89a413952e 100755
--- a/gempak/ush/gfs_meta_ver.sh
+++ b/gempak/ush/gfs_meta_ver.sh
@@ -26,7 +26,7 @@ set -x
export PS4='VER:$SECONDS + '
mkdir -p -m 775 $DATA/VER
cd $DATA/VER
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
export COMPONENT=${COMPONENT:-atmos}
diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK
index 1535e07ae3..55aca00cee 100755
--- a/jobs/JGDAS_ATMOS_GEMPAK
+++ b/jobs/JGDAS_ATMOS_GEMPAK
@@ -11,7 +11,6 @@ export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
diff --git a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
index 6948d29df6..92b56f10b8 100755
--- a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
+++ b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
@@ -17,7 +17,6 @@ export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
@@ -26,7 +25,7 @@ export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
# Now set up GEMPAK/NTRANS environment
#
-cp ${FIXgempak}/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
###################################
# Specify NET and RUN Name and model
diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
index 516c7a403b..d2277bb0da 100755
--- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
+++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
@@ -14,7 +14,6 @@ export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
###################################
# Specify NET and RUN Name and model
diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2
index 5bd7749997..3503a92b08 100755
--- a/jobs/JGFS_ATMOS_AWIPS_G2
+++ b/jobs/JGFS_ATMOS_AWIPS_G2
@@ -17,7 +17,6 @@ export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
###################################
diff --git a/jobs/JGFS_ATMOS_FBWIND b/jobs/JGFS_ATMOS_FBWIND
index e04b06c0d6..d640907c5c 100755
--- a/jobs/JGFS_ATMOS_FBWIND
+++ b/jobs/JGFS_ATMOS_FBWIND
@@ -16,7 +16,6 @@ export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
###################################
diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK
index ddf10342d2..ff7479daf2 100755
--- a/jobs/JGFS_ATMOS_GEMPAK
+++ b/jobs/JGFS_ATMOS_GEMPAK
@@ -10,7 +10,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak"
export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
diff --git a/jobs/JGFS_ATMOS_GEMPAK_META b/jobs/JGFS_ATMOS_GEMPAK_META
index 8e1c05763f..f3573419c5 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_META
+++ b/jobs/JGFS_ATMOS_GEMPAK_META
@@ -24,11 +24,10 @@ export MP_DEBUG_NOTIMEOUT=yes
export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-cp ${FIXgempak}/datatype.tbl datatype.tbl
+cp ${HOMEgfs}/gempak/fix/datatype.tbl datatype.tbl
#############################################
#set the fcst hrs for all the cycles
diff --git a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
index 58b24c5e49..aa0acf445e 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
+++ b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
@@ -15,7 +15,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_gif" -c "base"
export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/gempak/fix}
export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
@@ -24,7 +23,6 @@ export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
# Set up the GEMPAK directory
#######################################
export HOMEgempak=${HOMEgempak:-${HOMEgfs}/gempak}
-export FIXgempak=${FIXgempak:-${HOMEgempak}/fix}
export USHgempak=${USHgempak:-${HOMEgempak}/ush}
export MP_PULSE=0
diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
index 47415a39ff..8b17f96a22 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
+++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
@@ -13,7 +13,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base"
export EXECgfs="${EXECgfs:-${HOMEgfs}/exec}"
export PARMgfs="${PARMgfs:-${HOMEgfs}/parm}"
export EXPDIR="${EXPDIR:-${HOMEgfs}/parm/config}"
-export FIXgempak="${FIXgempak:-${HOMEgfs}/gempak/fix}"
export USHgempak="${USHgempak:-${HOMEgfs}/gempak/ush}"
export SRCgfs="${SRCgfs:-${HOMEgfs}/scripts}"
diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
index a98835ada2..ecebdffa3e 100755
--- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
+++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
@@ -19,7 +19,6 @@ export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
###################################
# Specify NET and RUN Name and model
diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND
index 721dd27628..4e99ef7a46 100755
--- a/jobs/JGFS_ATMOS_POSTSND
+++ b/jobs/JGFS_ATMOS_POSTSND
@@ -24,7 +24,6 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
export HOMEbufrsnd=${HOMEbufrsnd:-${HOMEgfs}}
export EXECbufrsnd=${EXECbufrsnd:-${HOMEbufrsnd}/exec}
-export FIXbufrsnd=${FIXbufrsnd:-${HOMEbufrsnd}/fix/product}
export PARMbufrsnd=${PARMbufrsnd:-${HOMEbufrsnd}/parm/product}
export USHbufrsnd=${USHbufrsnd:-${HOMEbufrsnd}/ush}
export SCRbufrsnd=${SCRbufrsnd:-${HOMEbufrsnd}/scripts}
diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE
index 66f6dfa8dc..235084e631 100755
--- a/jobs/JGLOBAL_ARCHIVE
+++ b/jobs/JGLOBAL_ARCHIVE
@@ -14,16 +14,15 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMO
COM_ATMOS_TRACK COM_ATMOS_WMO \
COM_CHEM_HISTORY COM_CHEM_ANALYSIS\
COM_MED_RESTART \
- COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART \
+ COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART COM_ICE_GRIB \
COM_OBS COM_TOP \
- COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \
+ COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_GRIB COM_OCEAN_NETCDF \
COM_OCEAN_ANALYSIS \
COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \
COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON COM_CONF
for grid in "0p25" "0p50" "1p00"; do
YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
- YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
done
###############################################################
diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS
index 9e5850bfc3..3d7a4278a2 100755
--- a/jobs/JGLOBAL_ATMOS_ANALYSIS
+++ b/jobs/JGLOBAL_ATMOS_ANALYSIS
@@ -79,7 +79,7 @@ export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles"
# Copy fix file for obsproc # TODO: Why is this necessary?
if [[ ${RUN} = "gfs" ]]; then
mkdir -p ${ROTDIR}/fix
- cp ${FIXgsi}/prepobs_errtable.global ${ROTDIR}/fix/
+ cp ${FIXgfs}/gsi/prepobs_errtable.global ${ROTDIR}/fix/
fi
diff --git a/jobs/JGLOBAL_ATMOS_POST_MANAGER b/jobs/JGLOBAL_ATMOS_POST_MANAGER
index 7c726bc2ad..797d26953b 100755
--- a/jobs/JGLOBAL_ATMOS_POST_MANAGER
+++ b/jobs/JGLOBAL_ATMOS_POST_MANAGER
@@ -17,7 +17,6 @@ export RUN=${RUN:-gfs}
####################################
export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}}
export EXECgfs=${HOMEgfs:-${HOMEgfs}/exec}
-export FIXgfs=${HOMEgfs:-${HOMEgfs}/fix}
export PARMgfs=${HOMEgfs:-${HOMEgfs}/parm}
export USHgfs=${HOMEgfs:-${HOMEgfs}/ush}
diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL
index 0d709e56dd..3d897db4c3 100755
--- a/jobs/JGLOBAL_ATMOS_SFCANL
+++ b/jobs/JGLOBAL_ATMOS_SFCANL
@@ -26,7 +26,7 @@ export GPREFIX="${GDUMP}.t${gcyc}z."
export APREFIX="${CDUMP}.t${cyc}z."
YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS COM_ATMOS_RESTART \
- COM_LAND_ANALYSIS
+ COM_SNOW_ANALYSIS
RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
COM_OBS_PREV:COM_OBS_TMPL \
diff --git a/jobs/JGLOBAL_OCEANICE_PRODUCTS b/jobs/JGLOBAL_OCEANICE_PRODUCTS
new file mode 100755
index 0000000000..1d8c6b42c6
--- /dev/null
+++ b/jobs/JGLOBAL_OCEANICE_PRODUCTS
@@ -0,0 +1,40 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products"
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+# Construct COM variables from templates
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_HISTORY"
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_GRIB"
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_NETCDF"
+
+###############################################################
+# Run exglobal script
+"${HOMEgfs}/scripts/exglobal_oceanice_products.py"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]]; then
+ cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA:-NO}" == "NO" ]] && rm -rf "${DATA}"
+
+
+exit 0
diff --git a/jobs/JGLOBAL_PREP_LAND_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS
similarity index 86%
rename from jobs/JGLOBAL_PREP_LAND_OBS
rename to jobs/JGLOBAL_PREP_SNOW_OBS
index 9b14451568..22ed977a5e 100755
--- a/jobs/JGLOBAL_PREP_LAND_OBS
+++ b/jobs/JGLOBAL_PREP_SNOW_OBS
@@ -1,8 +1,8 @@
#! /usr/bin/env bash
source "${HOMEgfs}/ush/preamble.sh"
-export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}}
-source "${HOMEgfs}/ush/jjob_header.sh" -e "preplandobs" -c "base preplandobs"
+export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "prepsnowobs" -c "base prepsnowobs"
##############################################
# Set variables used in the script
@@ -25,7 +25,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
###############################################################
# Run relevant script
-EXSCRIPT=${GDASLANDPREPSH:-${HOMEgfs}/scripts/exglobal_prep_land_obs.py}
+EXSCRIPT=${GDASSNOWPREPPY:-${HOMEgfs}/scripts/exglobal_prep_snow_obs.py}
${EXSCRIPT}
status=$?
[[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}")
diff --git a/jobs/JGLOBAL_LAND_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS
similarity index 78%
rename from jobs/JGLOBAL_LAND_ANALYSIS
rename to jobs/JGLOBAL_SNOW_ANALYSIS
index 0cef66de09..db1a8a8ba3 100755
--- a/jobs/JGLOBAL_LAND_ANALYSIS
+++ b/jobs/JGLOBAL_SNOW_ANALYSIS
@@ -1,8 +1,8 @@
#! /usr/bin/env bash
source "${HOMEgfs}/ush/preamble.sh"
-export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}}
-source "${HOMEgfs}/ush/jjob_header.sh" -e "landanl" -c "base landanl"
+export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl"
##############################################
# Set variables used in the script
@@ -18,17 +18,17 @@ GDUMP="gdas"
# Begin JOB SPECIFIC work
##############################################
# Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_LAND_ANALYSIS COM_CONF
+YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_SNOW_ANALYSIS COM_CONF
RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
-mkdir -m 775 -p "${COM_LAND_ANALYSIS}" "${COM_CONF}"
+mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}"
###############################################################
# Run relevant script
-EXSCRIPT=${LANDANLPY:-${HOMEgfs}/scripts/exglobal_land_analysis.py}
+EXSCRIPT=${SNOWANLPY:-${HOMEgfs}/scripts/exglobal_snow_analysis.py}
${EXSCRIPT}
status=$?
[[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT
index 7ad742f25a..7825c33496 100755
--- a/jobs/JGLOBAL_WAVE_INIT
+++ b/jobs/JGLOBAL_WAVE_INIT
@@ -10,7 +10,6 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT
index 9d404077fd..69200fba0b 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNT
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT
@@ -9,7 +9,6 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
index 3de49fcc3b..536cadd165 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
@@ -13,7 +13,6 @@ export CDATE=${PDY}${cyc}
export MP_PULSE=0
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT
index 1b573435a3..9ff2f78f63 100755
--- a/jobs/JGLOBAL_WAVE_POST_PNT
+++ b/jobs/JGLOBAL_WAVE_POST_PNT
@@ -9,7 +9,6 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS
index 231b793de7..2a7e5657dc 100755
--- a/jobs/JGLOBAL_WAVE_POST_SBS
+++ b/jobs/JGLOBAL_WAVE_POST_SBS
@@ -9,7 +9,6 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP
index f246045f53..479f9b8641 100755
--- a/jobs/JGLOBAL_WAVE_PREP
+++ b/jobs/JGLOBAL_WAVE_PREP
@@ -17,7 +17,6 @@ export MP_PULSE=0
export CDO=${CDO_ROOT}/bin/cdo
# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
export USHwave=${USHwave:-${HOMEgfs}/ush}
export EXECwave=${EXECwave:-${HOMEgfs}/exec}
diff --git a/jobs/rocoto/awips_g2.sh b/jobs/rocoto/awips_g2.sh
index 121c96d63f..ed5a10c164 100755
--- a/jobs/rocoto/awips_g2.sh
+++ b/jobs/rocoto/awips_g2.sh
@@ -44,6 +44,7 @@ for fhr3 in ${fhrlst}; do
fhmax=240
if (( fhr >= fhmin && fhr <= fhmax )); then
if ((fhr % 6 == 0)); then
+ export fcsthrs="${fhr3}"
"${AWIPSG2SH}"
fi
fi
diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh
new file mode 100755
index 0000000000..48816fb3a1
--- /dev/null
+++ b/jobs/rocoto/oceanice_products.sh
@@ -0,0 +1,37 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+## ocean ice products driver script
+## FHRLST : forecast hour list to post-process (e.g. f000, f000_f001_f002, ...)
+###############################################################
+
+# Source FV3GFS workflow modules
+. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+if (( status != 0 )); then exit "${status}"; fi
+
+###############################################################
+# setup python path for workflow utilities and tasks
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+export job="oceanice_products"
+export jobid="${job}.$$"
+
+###############################################################
+# shellcheck disable=SC2153,SC2001
+IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array
+
+#---------------------------------------------------------------
+# Execute the JJOB
+for fhr in "${fhrs[@]}"; do
+ export FORECAST_HOUR=$(( 10#${fhr} ))
+ "${HOMEgfs}/jobs/JGLOBAL_OCEANICE_PRODUCTS"
+ status=$?
+ if (( status != 0 )); then exit "${status}"; fi
+done
+
+exit 0
diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh
deleted file mode 100755
index 5a2dc091cf..0000000000
--- a/jobs/rocoto/ocnpost.sh
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-## CICE5/MOM6 post driver script
-## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...)
-## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...)
-###############################################################
-
-# Source FV3GFS workflow modules
-source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
-status=$?
-(( status != 0 )) && exit "${status}"
-
-export job="ocnpost"
-export jobid="${job}.$$"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost"
-
-##############################################
-# Set variables used in the exglobal script
-##############################################
-export CDUMP=${RUN/enkf}
-
-##############################################
-# Begin JOB SPECIFIC work
-##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3D \
- COM_OCEAN_XSECT COM_ICE_HISTORY
-
-for grid in "0p50" "0p25"; do
- YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
-done
-
-for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do
- if [[ ! -d "${!outdir}" ]]; then
- mkdir -p "${!outdir}"
- fi
-done
-
-fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g')
-
-export OMP_NUM_THREADS=1
-export ENSMEM=${ENSMEM:-000}
-
-export IDATE=${PDY}${cyc}
-
-for fhr in ${fhrlst}; do
- export fhr=${fhr}
- # Ignore possible spelling error (nothing is misspelled)
- # shellcheck disable=SC2153
- VDATE=$(${NDATE} "${fhr}" "${IDATE}")
- # shellcheck disable=
- declare -x VDATE
- cd "${DATA}" || exit 2
- if (( 10#${fhr} > 0 )); then
- # TODO: This portion calls NCL scripts that are deprecated (see Issue #923)
- if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then
- export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
- "${MOM6REGRID}/scripts/run_regrid.sh"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
-
- # Convert the netcdf files to grib2
- export executable=${MOM6REGRID}/exec/reg2grb2.x
- "${MOM6REGRID}/scripts/run_reg2grb2.sh"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" "${COM_OCEAN_GRIB_0p25}/"
- ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" "${COM_OCEAN_GRIB_0p50}/"
- fi
-
- #break up ocn netcdf into multiple files:
- if [[ -f "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -x -v vo,uo,so,temp \
- "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v \
- "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -v temp -d yh,0.0 \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -v uo -d yh,0.0 \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- fi
-done
-
-# clean up working folder
-if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi
-###############################################################
-# Exit out cleanly
-
-
-exit 0
diff --git a/jobs/rocoto/preplandobs.sh b/jobs/rocoto/prepsnowobs.sh
similarity index 92%
rename from jobs/rocoto/preplandobs.sh
rename to jobs/rocoto/prepsnowobs.sh
index 6304dd611b..dae11129eb 100755
--- a/jobs/rocoto/preplandobs.sh
+++ b/jobs/rocoto/prepsnowobs.sh
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh"
status=$?
[[ ${status} -ne 0 ]] && exit "${status}"
-export job="preplandobs"
+export job="prepsnowobs"
export jobid="${job}.$$"
###############################################################
@@ -20,6 +20,6 @@ export PYTHONPATH
###############################################################
# Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_PREP_LAND_OBS"
+"${HOMEgfs}/jobs/JGLOBAL_PREP_SNOW_OBS"
status=$?
exit "${status}"
diff --git a/jobs/rocoto/landanl.sh b/jobs/rocoto/snowanl.sh
similarity index 91%
rename from jobs/rocoto/landanl.sh
rename to jobs/rocoto/snowanl.sh
index f49b6f9f8b..627dd860f4 100755
--- a/jobs/rocoto/landanl.sh
+++ b/jobs/rocoto/snowanl.sh
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh"
status=$?
[[ ${status} -ne 0 ]] && exit "${status}"
-export job="landanl"
+export job="snowanl"
export jobid="${job}.$$"
###############################################################
@@ -19,6 +19,6 @@ export PYTHONPATH
###############################################################
# Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS"
+"${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS"
status=$?
exit "${status}"
diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua
index 311fb0a1cf..8703100e76 100644
--- a/modulefiles/module_base.hera.lua
+++ b/modulefiles/module_base.hera.lua
@@ -2,9 +2,8 @@ help([[
Load environment to run GFS on Hera
]])
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -33,10 +32,11 @@ load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
-
--- MET/METplus are not available for use with spack-stack, yet
---load(pathJoin("met", (os.getenv("met_ver") or "None")))
---load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.hercules.lua b/modulefiles/module_base.hercules.lua
index d9c8f5ed0b..5534437392 100644
--- a/modulefiles/module_base.hercules.lua
+++ b/modulefiles/module_base.hercules.lua
@@ -2,9 +2,8 @@ help([[
Load environment to run GFS on Hercules
]])
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -12,10 +11,6 @@ load(pathJoin("intel-oneapi-mkl", (os.getenv("intel_mkl_ver") or "None")))
load(pathJoin("python", (os.getenv("python_ver") or "None")))
load(pathJoin("perl", (os.getenv("perl_ver") or "None")))
--- TODO load NCL once the SAs remove the 'depends_on' statements within it
--- NCL is a static installation and does not depend on any libraries
--- but as is will load, among others, the system netcdf-c/4.9.0 module
---load(pathJoin("ncl", (os.getenv("ncl_ver") or "None")))
load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
@@ -35,6 +30,11 @@ load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua
index 64d35da57a..55a1eb1c68 100644
--- a/modulefiles/module_base.jet.lua
+++ b/modulefiles/module_base.jet.lua
@@ -2,9 +2,8 @@ help([[
Load environment to run GFS on Jet
]])
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -33,6 +32,11 @@ load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua
index 65486855d0..4e2e24b82f 100644
--- a/modulefiles/module_base.orion.lua
+++ b/modulefiles/module_base.orion.lua
@@ -2,9 +2,8 @@ help([[
Load environment to run GFS on Orion
]])
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -31,10 +30,11 @@ load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
-
--- MET/METplus are not yet supported with spack-stack
---load(pathJoin("met", (os.getenv("met_ver") or "None")))
---load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua
index d99a93c3f4..d8dccc89ba 100644
--- a/modulefiles/module_base.s4.lua
+++ b/modulefiles/module_base.s4.lua
@@ -2,9 +2,8 @@ help([[
Load environment to run GFS on S4
]])
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -30,6 +29,11 @@ load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua
index ee4ee6a5fb..43b21ccc25 100644
--- a/modulefiles/module_base.wcoss2.lua
+++ b/modulefiles/module_base.wcoss2.lua
@@ -31,6 +31,11 @@ load(pathJoin("ncdiag", (os.getenv("ncdiag_ver") or "None")))
load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+prepend_path("MODULEPATH", "/apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304")
+setenv("HPC_OPT", "/apps/ops/para/libs")
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+
--prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua
index 1aecddf549..3d4c413a44 100644
--- a/modulefiles/module_gwci.hera.lua
+++ b/modulefiles/module_gwci.hera.lua
@@ -2,13 +2,13 @@ help([[
Load environment to run GFS workflow setup scripts on Hera
]])
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core")
load(pathJoin("stack-intel", os.getenv("2021.5.0")))
load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1")))
load(pathJoin("netcdf-c", os.getenv("4.9.2")))
-load(pathJoin("netcdf-fortran", os.getenv("4.6.0")))
+load(pathJoin("netcdf-fortran", os.getenv("4.6.1")))
load(pathJoin("nccmp","1.9.0.1"))
load(pathJoin("wgrib2", "2.0.8"))
diff --git a/modulefiles/module_gwci.hercules.lua b/modulefiles/module_gwci.hercules.lua
index 9c60aed467..179bbef114 100644
--- a/modulefiles/module_gwci.hercules.lua
+++ b/modulefiles/module_gwci.hercules.lua
@@ -2,7 +2,7 @@ help([[
Load environment to run GFS workflow ci scripts on Hercules
]])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
load(pathJoin("stack-intel", os.getenv("2021.9.0")))
load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.9.0")))
diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua
index 18851ba7d4..cef7acf308 100644
--- a/modulefiles/module_gwci.orion.lua
+++ b/modulefiles/module_gwci.orion.lua
@@ -2,13 +2,13 @@ help([[
Load environment to run GFS workflow ci scripts on Orion
]])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
load(pathJoin("stack-intel", os.getenv("2022.0.2")))
load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1")))
load(pathJoin("netcdf-c", os.getenv("4.9.2")))
-load(pathJoin("netcdf-fortran", os.getenv("4.6.0")))
+load(pathJoin("netcdf-fortran", os.getenv("4.6.1")))
load(pathJoin("nccmp","1.9.0.1"))
load(pathJoin("wgrib2", "2.0.8"))
diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua
index 961403e1a2..3e8bf2d7f8 100644
--- a/modulefiles/module_gwsetup.hera.lua
+++ b/modulefiles/module_gwsetup.hera.lua
@@ -4,17 +4,17 @@ Load environment to run GFS workflow setup scripts on Hera
load(pathJoin("rocoto"))
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core")
local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
load(pathJoin("stack-intel", stack_intel_ver))
load(pathJoin("python", python_ver))
load("py-jinja2")
load("py-pyyaml")
load("py-numpy")
-local git_ver=os.getenv("git_ver") or "2.40.0"
+local git_ver=os.getenv("git_ver") or "2.18.0"
load(pathJoin("git", git_ver))
whatis("Description: GFS run setup environment")
diff --git a/modulefiles/module_gwsetup.hercules.lua b/modulefiles/module_gwsetup.hercules.lua
index 673928605c..795b295b30 100644
--- a/modulefiles/module_gwsetup.hercules.lua
+++ b/modulefiles/module_gwsetup.hercules.lua
@@ -5,10 +5,10 @@ Load environment to run GFS workflow ci scripts on Hercules
load(pathJoin("contrib","0.1"))
load(pathJoin("rocoto","1.3.5"))
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
load(pathJoin("stack-intel", stack_intel_ver))
load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.jet.lua b/modulefiles/module_gwsetup.jet.lua
index d08389c711..72c40469e4 100644
--- a/modulefiles/module_gwsetup.jet.lua
+++ b/modulefiles/module_gwsetup.jet.lua
@@ -2,12 +2,12 @@ help([[
Load environment to run GFS workflow setup scripts on Jet
]])
-load(pathJoin("rocoto", "1.3.3"))
+load(pathJoin("rocoto"))
-prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core")
local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
load(pathJoin("stack-intel", stack_intel_ver))
load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua
index 93a59c8e50..96ed50f7f0 100644
--- a/modulefiles/module_gwsetup.orion.lua
+++ b/modulefiles/module_gwsetup.orion.lua
@@ -7,10 +7,10 @@ load(pathJoin("contrib","0.1"))
load(pathJoin("rocoto","1.3.3"))
load(pathJoin("git","2.28.0"))
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
local stack_intel_ver=os.getenv("stack_intel_ver") or "2022.0.2"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
load(pathJoin("stack-intel", stack_intel_ver))
load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.s4.lua b/modulefiles/module_gwsetup.s4.lua
index 291c654bb3..77a647006f 100644
--- a/modulefiles/module_gwsetup.s4.lua
+++ b/modulefiles/module_gwsetup.s4.lua
@@ -5,10 +5,10 @@ Load environment to run GFS workflow setup scripts on S4
load(pathJoin("rocoto","1.3.5"))
load(pathJoin("git","2.30.0"))
-prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
load(pathJoin("stack-intel", stack_intel_ver))
load(pathJoin("python", python_ver))
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn
index ff2fe3377b..0062aed163 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base.emc.dyn
@@ -30,16 +30,6 @@ export UTILgfs=${HOMEgfs}/util
export EXECgfs=${HOMEgfs}/exec
export SCRgfs=${HOMEgfs}/scripts
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
-
########################################################################
# GLOBAL static environment parameters
@@ -144,7 +134,7 @@ export DO_OCN="NO"
export DO_ICE="NO"
export DO_AERO="NO"
export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
+export DOBNDPNT_WAVE="NO" # The GEFS buoys file does not currently have any boundary points
export FRAC_GRID=".true."
# Set operational resolution
@@ -220,9 +210,10 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
export FHMIN_GFS=0
export FHMIN=${FHMIN_GFS}
export FHMAX_GFS=@FHMAX_GFS@
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
+export FHOUT_GFS=6
export FHMAX_HF_GFS=0
export FHOUT_HF_GFS=1
+export FHOUT_OCNICE_GFS=6
if (( gfs_cyc != 0 )); then
export STEP_GFS=$(( 24 / gfs_cyc ))
else
@@ -257,7 +248,7 @@ export imp_physics=8
export DO_JEDIATMVAR="NO"
export DO_JEDIATMENS="NO"
export DO_JEDIOCNVAR="NO"
-export DO_JEDILANDDA="NO"
+export DO_JEDISNOWDA="NO"
export DO_MERGENSST="NO"
# Hybrid related
diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index 6a2a852e0b..74a84e1173 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -27,6 +27,7 @@ export FHMAX=${FHMAX_GFS}
export FHOUT=${FHOUT_GFS}
export FHMAX_HF=${FHMAX_HF_GFS}
export FHOUT_HF=${FHOUT_HF_GFS}
+export FHOUT_OCNICE=${FHOUT_OCNICE_GFS}
# Get task specific resources
source "${EXPDIR}/config.resources" fcst
diff --git a/parm/config/gefs/config.oceanice_products b/parm/config/gefs/config.oceanice_products
new file mode 120000
index 0000000000..f6cf9cd60b
--- /dev/null
+++ b/parm/config/gefs/config.oceanice_products
@@ -0,0 +1 @@
+../gfs/config.oceanice_products
\ No newline at end of file
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 36b70aecb8..1f6485931f 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -9,17 +9,17 @@ if (( $# != 1 )); then
echo "Must specify an input task argument to set resource variables!"
echo "argument can be any one of the following:"
echo "stage_ic aerosol_init"
- echo "prep preplandobs prepatmiodaobs"
+ echo "prep prepsnowobs prepatmiodaobs"
echo "atmanlinit atmanlrun atmanlfinal"
echo "atmensanlinit atmensanlrun atmensanlfinal"
- echo "landanl"
+ echo "snowanl"
echo "aeroanlinit aeroanlrun aeroanlfinal"
echo "anal sfcanl analcalc analdiag fcst echgres"
echo "upp atmos_products"
echo "tracker genesis genesis_fsu"
echo "verfozn verfrad vminmon fit2obs metp arch cleanup"
echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
- echo "init_chem mom6ic ocnpost"
+ echo "init_chem mom6ic"
echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
echo "wavegempak waveawipsbulls waveawipsgridded"
echo "postsnd awips gempak npoess"
@@ -68,6 +68,15 @@ esac
export npe_node_max
case ${step} in
+
+ "stage_ic")
+ export wtime_stage_ic="00:15:00"
+ export npe_stage_ic=1
+ export npe_node_stage_ic=1
+ export nth_stage_ic=1
+ export is_exclusive=True
+ ;;
+
"waveinit")
export wtime_waveinit="00:10:00"
export npe_waveinit=12
@@ -77,25 +86,10 @@ case ${step} in
export memory_waveinit="2GB"
;;
- "wavepostsbs")
- export wtime_wavepostsbs="00:20:00"
- export wtime_wavepostsbs_gfs="03:00:00"
- export npe_wavepostsbs=8
- export nth_wavepostsbs=1
- export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs ))
- export NTASKS=${npe_wavepostsbs}
- export memory_wavepostsbs="10GB"
- export memory_wavepostsbs_gfs="10GB"
- ;;
-
"fcst" | "efcs")
export is_exclusive=True
- if [[ "${step}" == "fcst" ]]; then
- _CDUMP_LIST=${CDUMP:-"gdas gfs"}
- elif [[ "${step}" == "efcs" ]]; then
- _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"}
- fi
+ _CDUMP_LIST=${CDUMP:-"gdas gfs"}
# During workflow creation, we need resources for all CDUMPs and CDUMP is undefined
for _CDUMP in ${_CDUMP_LIST}; do
@@ -224,11 +218,47 @@ case ${step} in
export is_exclusive=True
;;
- "stage_ic")
- export wtime_stage_ic="00:15:00"
- export npe_stage_ic=1
- export npe_node_stage_ic=1
- export nth_stage_ic=1
+ "oceanice_products")
+ export wtime_oceanice_products="00:15:00"
+ export npe_oceanice_products=1
+ export npe_node_oceanice_products=1
+ export nth_oceanice_products=1
+ export memory_oceanice_products="96GB"
+ ;;
+
+ "wavepostsbs")
+ export wtime_wavepostsbs="03:00:00"
+ export npe_wavepostsbs=1
+ export nth_wavepostsbs=1
+ export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs ))
+ export NTASKS=${npe_wavepostsbs}
+ export memory_wavepostsbs="10GB"
+ ;;
+
+ "wavepostbndpnt")
+ export wtime_wavepostbndpnt="01:00:00"
+ export npe_wavepostbndpnt=240
+ export nth_wavepostbndpnt=1
+ export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt ))
+ export NTASKS=${npe_wavepostbndpnt}
+ export is_exclusive=True
+ ;;
+
+ "wavepostbndpntbll")
+ export wtime_wavepostbndpntbll="01:00:00"
+ export npe_wavepostbndpntbll=448
+ export nth_wavepostbndpntbll=1
+ export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll ))
+ export NTASKS=${npe_wavepostbndpntbll}
+ export is_exclusive=True
+ ;;
+
+ "wavepostpnt")
+ export wtime_wavepostpnt="04:00:00"
+ export npe_wavepostpnt=200
+ export nth_wavepostpnt=1
+ export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt ))
+ export NTASKS=${npe_wavepostpnt}
export is_exclusive=True
;;
@@ -239,4 +269,4 @@ case ${step} in
esac
-echo "END: config.resources"
\ No newline at end of file
+echo "END: config.resources"
diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave
index e04331e533..5f4448985c 100644
--- a/parm/config/gefs/config.wave
+++ b/parm/config/gefs/config.wave
@@ -7,11 +7,7 @@ echo "BEGIN: config.wave"
# Parameters that are common to all wave model steps
-# System and version
-export wave_sys_ver=v1.0.0
-
export EXECwave="${HOMEgfs}/exec"
-export FIXwave="${HOMEgfs}/fix/wave"
export PARMwave="${HOMEgfs}/parm/wave"
export USHwave="${HOMEgfs}/ush"
diff --git a/parm/config/gefs/config.wavepostbndpnt b/parm/config/gefs/config.wavepostbndpnt
new file mode 100644
index 0000000000..412c5fb42a
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpnt
+
+echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gefs/config.wavepostbndpntbll b/parm/config/gefs/config.wavepostbndpntbll
new file mode 100644
index 0000000000..6695ab0f84
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpntbll
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpntbll ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpntbll"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpntbll
+
+echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gefs/config.wavepostpnt b/parm/config/gefs/config.wavepostpnt
new file mode 100644
index 0000000000..e87237da82
--- /dev/null
+++ b/parm/config/gefs/config.wavepostpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostpnt
+
+echo "END: config.wavepostpnt"
diff --git a/parm/config/gefs/config.wavepostsbs b/parm/config/gefs/config.wavepostsbs
new file mode 100644
index 0000000000..b3c5902e3c
--- /dev/null
+++ b/parm/config/gefs/config.wavepostsbs
@@ -0,0 +1,28 @@
+#! /usr/bin/env bash
+
+########## config.wavepostsbs ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostsbs"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostsbs
+
+# Subgrid info for grib2 encoding
+export WAV_SUBGRBSRC=""
+export WAV_SUBGRB=""
+
+# Options for point output (switch on/off boundary point output)
+export DOIBP_WAV='NO' # Input boundary points
+export DOFLD_WAV='YES' # Field data
+export DOPNT_WAV='YES' # Station data
+export DOGRB_WAV='YES' # Create grib2 files
+if [[ -n "${waveinterpGRD}" ]]; then
+ export DOGRI_WAV='YES' # Create interpolated grids
+else
+ export DOGRI_WAV='NO' # Do not create interpolated grids
+fi
+export DOSPC_WAV='YES' # Spectral post
+export DOBLL_WAV='YES' # Bulletin post
+
+echo "END: config.wavepostsbs"
diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml
index 84dfcbf718..b19eb57e55 100644
--- a/parm/config/gefs/yaml/defaults.yaml
+++ b/parm/config/gefs/yaml/defaults.yaml
@@ -2,6 +2,6 @@ base:
DO_JEDIATMVAR: "NO"
DO_JEDIATMENS: "NO"
DO_JEDIOCNVAR: "NO"
- DO_JEDILANDDA: "NO"
+ DO_JEDISNOWDA: "NO"
DO_MERGENSST: "NO"
FHMAX_GFS: 120
diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl
index 32ba43b7ba..8d8aa92c16 100644
--- a/parm/config/gfs/config.aeroanl
+++ b/parm/config/gfs/config.aeroanl
@@ -10,8 +10,7 @@ export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/
export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml
export STATICB_TYPE='identity'
export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml
-export FIXgdas=${HOMEgfs}/fix/gdas
-export BERROR_DATA_DIR=${FIXgdas}/bump/aero/${CASE_ANL}/
+export BERROR_DATA_DIR=${FIXgfs}/gdas/bump/aero/${CASE_ANL}/
export BERROR_DATE="20160630.000000"
export io_layout_x=@IO_LAYOUT_X@
diff --git a/parm/config/gfs/config.anal b/parm/config/gfs/config.anal
index e3a17f9c6a..98d0e88cc2 100644
--- a/parm/config/gfs/config.anal
+++ b/parm/config/gfs/config.anal
@@ -45,51 +45,51 @@ export AMSR2BF=${AMSR2BF:-/dev/null}
# Set default values for info files and observation error
# NOTE: Remember to set PRVT in config.prep as OBERROR is set below
-export CONVINFO=${FIXgsi}/global_convinfo.txt
-export OZINFO=${FIXgsi}/global_ozinfo.txt
-export SATINFO=${FIXgsi}/global_satinfo.txt
-export OBERROR=${FIXgsi}/prepobs_errtable.global
+export CONVINFO=${FIXgfs}/gsi/global_convinfo.txt
+export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt
+export SATINFO=${FIXgfs}/gsi/global_satinfo.txt
+export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global
# Use experimental dumps in EMC GFS v16 parallels
if [[ ${RUN_ENVIR} == "emc" ]]; then
# Set info files and prepobs.errtable.global for GFS v16 retrospective parallels
if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900
- export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900
+ export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900
fi
# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps
if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706
- export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706
+ export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706
fi
# Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations
if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718
- export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718
+ export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
fi
# Assimilate COSMIC-2
if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612
- export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612
+ export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
fi
# Assimilate HDOB
if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412
fi
# Assimilate Metop-C GNSSRO
if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612
fi
# Assimilate DO-2 GeoOptics
if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then
- export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712
+ export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712
fi
# NOTE:
@@ -98,38 +98,38 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
# needed at this time.
# Assimilate COSMIC-2 GPS
# if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
- # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312
+ # export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312
# fi
# Turn off assmilation of OMPS during period of bad data
if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then
- export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600
+ export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600
fi
# Set satinfo for start of GFS v16 parallels
if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
- export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900
+ export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900
fi
# Turn on assimilation of Metop-C AMSUA and MHS
if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then
- export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706
+ export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706
fi
# Turn off assimilation of Metop-A MHS
if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then
- export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012
+ export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012
fi
# Turn off assimilation of S-NPP CrIS
if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then
- export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118
+ export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118
fi
# Turn off assimilation of MetOp-A IASI
if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then
- export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206
+ export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206
fi
# NOTE:
@@ -139,7 +139,7 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
#
# Turn off assmilation of all Metop-A MHS
# if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
- # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312
+ # export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312
# fi
fi
diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index 3e6b351cb2..e344b0a662 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -13,6 +13,9 @@ export STATICB_TYPE="gsibec"
export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml
export INTERP_METHOD='barycentric'
+export layout_x_atmanl=@LAYOUT_X_ATMANL@
+export layout_y_atmanl=@LAYOUT_Y_ATMANL@
+
export io_layout_x=@IO_LAYOUT_X@
export io_layout_y=@IO_LAYOUT_Y@
diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index 58fd7b6e22..7a3a632bf8 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -10,6 +10,9 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml
export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml
export INTERP_METHOD='barycentric'
+export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@
+export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@
+
export io_layout_x=@IO_LAYOUT_X@
export io_layout_y=@IO_LAYOUT_Y@
diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn
index 1f6568c3ee..32284929c9 100644
--- a/parm/config/gfs/config.base.emc.dyn
+++ b/parm/config/gfs/config.base.emc.dyn
@@ -30,16 +30,6 @@ export UTILgfs="${HOMEgfs}/util"
export EXECgfs="${HOMEgfs}/exec"
export SCRgfs="${HOMEgfs}/scripts"
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
-
########################################################################
# GLOBAL static environment parameters
@@ -95,7 +85,6 @@ export MODE="@MODE@" # cycled/forecast-only
# CLEAR
####################################################
# Build paths relative to $HOMEgfs
-export FIXgsi="${HOMEgfs}/fix/gsi"
export HOMEpost="${HOMEgfs}"
export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}"
@@ -182,16 +171,16 @@ export ICERES="${OCNRES}"
# These are the currently recommended grid-combinations
case "${CASE}" in
"C48")
- export waveGRD='glo_500'
+ export waveGRD='uglo_100km'
;;
"C96" | "C192")
- export waveGRD='glo_200'
+ export waveGRD='uglo_100km'
;;
"C384")
- export waveGRD='glo_025'
+ export waveGRD='uglo_100km'
;;
"C768" | "C1152")
- export waveGRD='mx025'
+ export waveGRD='uglo_m1g16'
;;
*)
echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
@@ -247,6 +236,7 @@ fi
export FHMIN=0
export FHMAX=9
export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
+export FHOUT_OCNICE=3
# Cycle to run EnKF (set to BOTH for both gfs and gdas)
export EUPD_CYC="gdas"
@@ -257,9 +247,10 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
# GFS output and frequency
export FHMIN_GFS=0
export FHMAX_GFS=@FHMAX_GFS@
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
+export FHOUT_GFS=3
export FHMAX_HF_GFS=0
export FHOUT_HF_GFS=1
+export FHOUT_OCNICE_GFS=6
if (( gfs_cyc != 0 )); then
export STEP_GFS=$(( 24 / gfs_cyc ))
else
@@ -304,7 +295,7 @@ export imp_physics=8
export DO_JEDIATMVAR="@DO_JEDIATMVAR@"
export DO_JEDIATMENS="@DO_JEDIATMENS@"
export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@"
-export DO_JEDILANDDA="@DO_JEDILANDDA@"
+export DO_JEDISNOWDA="@DO_JEDISNOWDA@"
export DO_MERGENSST="@DO_MERGENSST@"
# Hybrid related
@@ -376,7 +367,7 @@ export netcdf_diag=".true."
export binary_diag=".false."
# Verification options
-export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
+export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp
export DO_FIT2OBS="YES" # Run fit to observations package
export DO_VRFY_OCEANDA="NO" # Run SOCA Ocean DA verification tasks
@@ -396,4 +387,11 @@ export FITSARC="YES"
export FHMAX_FITS=132
[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
+# The monitor jobs are not yet supported for JEDIATMVAR
+if [[ ${DO_JEDIATMVAR} = "YES" ]]; then
+ export DO_VERFOZN="NO" # Ozone data assimilation monitoring
+ export DO_VERFRAD="NO" # Radiance data assimilation monitoring
+ export DO_VMINMON="NO" # GSI minimization monitoring
+fi
+
echo "END: config.base"
diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com
index db648b5866..2f99e709ea 100644
--- a/parm/config/gfs/config.com
+++ b/parm/config/gfs/config.com
@@ -52,7 +52,7 @@ declare -rx COM_CONF_TMPL=${COM_BASE}'/conf'
declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input'
declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart'
declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos'
-declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land'
+declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow'
declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history'
declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master'
declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2'
@@ -80,15 +80,16 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history'
declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart'
declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input'
declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean'
-declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D'
-declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D'
-declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect'
+declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf'
declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2'
declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}'
declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input'
declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history'
declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart'
+declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf'
+declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2'
+declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}'
declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history'
declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem'
diff --git a/parm/config/gfs/config.esfc b/parm/config/gfs/config.esfc
index 2bb3d48bb4..7c32313758 100644
--- a/parm/config/gfs/config.esfc
+++ b/parm/config/gfs/config.esfc
@@ -16,4 +16,9 @@ if [ $DOIAU_ENKF = "YES" ]; then
export DOSFCANL_ENKF="NO"
fi
+# Turn off NST in JEDIATMENS
+if [[ "${DO_JEDIATMENS}" == "YES" ]]; then
+ export DONST="NO"
+fi
+
echo "END: config.esfc"
diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index d2e2664e9c..c03c97e56f 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -30,6 +30,7 @@ case ${RUN} in
export FHOUT=${FHOUT_GFS}
export FHMAX_HF=${FHMAX_HF_GFS}
export FHOUT_HF=${FHOUT_HF_GFS}
+ export FHOUT_OCNICE=${FHOUT_OCNICE_GFS}
;;
*gdas)
export FHMAX_HF=0
diff --git a/parm/config/gfs/config.metp b/parm/config/gfs/config.metp
index c90903f6a5..8260d1c472 100644
--- a/parm/config/gfs/config.metp
+++ b/parm/config/gfs/config.metp
@@ -23,6 +23,7 @@ export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflo
export model=${PSLOT}
export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
export model_hpss_dir=${ATARDIR}/..
+export model_dir=${ARCDIR}/..
export get_data_from_hpss="NO"
export hpss_walltime="10"
## OUTPUT SETTINGS
diff --git a/parm/config/gfs/config.nsst b/parm/config/gfs/config.nsst
index db4367b2c0..7bda81f058 100644
--- a/parm/config/gfs/config.nsst
+++ b/parm/config/gfs/config.nsst
@@ -10,6 +10,11 @@ echo "BEGIN: config.nsst"
# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
export NST_MODEL=2
+# Set NST_MODEL for JEDIATMVAR or JEDIATMENS
+if [[ "${DO_JEDIATMVAR}" == "YES" || "${DO_JEDIATMENS}" == "YES" ]]; then
+ export NST_MODEL=1
+fi
+
# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
export NST_SPINUP=0
cdate="${PDY}${cyc}"
diff --git a/parm/config/gfs/config.oceanice_products b/parm/config/gfs/config.oceanice_products
new file mode 100644
index 0000000000..bea70c21cc
--- /dev/null
+++ b/parm/config/gfs/config.oceanice_products
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.oceanice_products ##########
+
+echo "BEGIN: config.oceanice_products"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" oceanice_products
+
+export OCEANICEPRODUCTS_CONFIG="${HOMEgfs}/parm/post/oceanice_products.yaml"
+
+# No. of forecast hours to process in a single job
+export NFHRS_PER_GROUP=3
+
+echo "END: config.oceanice_products"
diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost
deleted file mode 100644
index 851c476e6c..0000000000
--- a/parm/config/gfs/config.ocnpost
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.ocnpost ##########
-
-echo "BEGIN: config.ocnpost"
-
-# Get task specific resources
-source "${EXPDIR}/config.resources" ocnpost
-
-# Convert netcdf files to grib files using post job
-#-------------------------------------------
-case "${OCNRES}" in
- "025") export MAKE_OCN_GRIB="YES";;
- "050") export MAKE_OCN_GRIB="NO";;
- "100") export MAKE_OCN_GRIB="NO";;
- "500") export MAKE_OCN_GRIB="NO";;
- *) export MAKE_OCN_GRIB="NO";;
-esac
-
-if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then
- #Currently the conversion to netcdf uses NCL which is not on WCOSS2 or HERCULES
- #This should be removed when this is updated
- export MAKE_OCN_GRIB="NO"
-fi
-
-# No. of forecast hours to process in a single job
-export NFHRS_PER_GROUP=3
-
-echo "END: config.ocnpost"
diff --git a/parm/config/gfs/config.prep b/parm/config/gfs/config.prep
index d5ac1925f7..9733eabc19 100644
--- a/parm/config/gfs/config.prep
+++ b/parm/config/gfs/config.prep
@@ -20,7 +20,6 @@ export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat}
export HOMERELO=$HOMEgfs
export EXECRELO=${HOMERELO}/exec
-export FIXRELO=${HOMERELO}/fix/am
export USHRELO=${HOMERELO}/ush
# Adjust observation error for GFS v16 parallels
@@ -28,18 +27,18 @@ export USHRELO=${HOMERELO}/ush
# NOTE: Remember to set OBERROR in config.anal as PRVT is set below
#
# Set default prepobs_errtable.global
-export PRVT=$FIXgsi/prepobs_errtable.global
+export PRVT=${FIXgfs}/gsi/prepobs_errtable.global
# Set prepobs.errtable.global for GFS v16 retrospective parallels
if [[ $RUN_ENVIR == "emc" ]]; then
if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
- export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900
+ export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900
fi
# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps
if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then
- export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706
+ export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706
fi
# NOTE:
diff --git a/parm/config/gfs/config.preplandobs b/parm/config/gfs/config.preplandobs
deleted file mode 100644
index 20ae20b5ad..0000000000
--- a/parm/config/gfs/config.preplandobs
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.preplandobs ##########
-# Land Obs Prep specific
-
-echo "BEGIN: config.preplandobs"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" preplandobs
-
-export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_gts.yaml"
-export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x"
-export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2"
-export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml"
-export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe"
-export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py"
-
-echo "END: config.preplandobs"
diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowobs
new file mode 100644
index 0000000000..64eb8ba896
--- /dev/null
+++ b/parm/config/gfs/config.prepsnowobs
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+########## config.prepsnowobs ##########
+# Snow Obs Prep specific
+
+echo "BEGIN: config.prepsnowobs"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" prepsnowobs
+
+export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/prep_gts.yaml"
+export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x"
+export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/fims.nml.j2"
+export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/prep_ims.yaml"
+export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe"
+export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py"
+
+echo "END: config.prepsnowobs"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 80a2c78757..a78bdb1384 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -9,17 +9,17 @@ if (( $# != 1 )); then
echo "Must specify an input task argument to set resource variables!"
echo "argument can be any one of the following:"
echo "stage_ic aerosol_init"
- echo "prep preplandobs prepatmiodaobs"
+ echo "prep prepsnowobs prepatmiodaobs"
echo "atmanlinit atmanlrun atmanlfinal"
echo "atmensanlinit atmensanlrun atmensanlfinal"
- echo "landanl"
+ echo "snowanl"
echo "aeroanlinit aeroanlrun aeroanlfinal"
echo "anal sfcanl analcalc analdiag fcst echgres"
echo "upp atmos_products"
echo "tracker genesis genesis_fsu"
echo "verfozn verfrad vminmon fit2obs metp arch cleanup"
echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
- echo "init_chem mom6ic ocnpost"
+ echo "init_chem mom6ic oceanice_products"
echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
echo "wavegempak waveawipsbulls waveawipsgridded"
echo "postsnd awips gempak npoess"
@@ -83,15 +83,15 @@ case ${step} in
fi
;;
- "preplandobs")
- export wtime_preplandobs="00:05:00"
- export npe_preplandobs=1
- export nth_preplandobs=1
- export npe_node_preplandobs=1
+ "prepsnowobs")
+ export wtime_prepsnowobs="00:05:00"
+ export npe_prepsnowobs=1
+ export nth_prepsnowobs=1
+ export npe_node_prepsnowobs=1
;;
"prepatmiodaobs")
- export wtime_prepatmiodaobs="00:10:00"
+ export wtime_prepatmiodaobs="00:30:00"
export npe_prepatmiodaobs=1
export nth_prepatmiodaobs=1
export npe_node_prepatmiodaobs=$(( npe_node_max / nth_prepatmiodaobs ))
@@ -195,9 +195,8 @@ case ${step} in
;;
"atmanlinit")
- # make below case dependent later
- export layout_x=1
- export layout_y=1
+ export layout_x=${layout_x_atmanl}
+ export layout_y=${layout_y_atmanl}
export layout_gsib_x=$(( layout_x * 3 ))
export layout_gsib_y=$(( layout_y * 2 ))
@@ -211,9 +210,8 @@ case ${step} in
;;
"atmanlrun")
- # make below case dependent later
- export layout_x=1
- export layout_y=1
+ export layout_x=${layout_x_atmanl}
+ export layout_y=${layout_y_atmanl}
export wtime_atmanlrun="00:30:00"
export npe_atmanlrun=$(( layout_x * layout_y * 6 ))
@@ -221,6 +219,7 @@ case ${step} in
export nth_atmanlrun=1
export nth_atmanlrun_gfs=${nth_atmanlrun}
export npe_node_atmanlrun=$(( npe_node_max / nth_atmanlrun ))
+ export memory_atmanlrun="96GB"
export is_exclusive=True
;;
@@ -232,7 +231,7 @@ case ${step} in
export is_exclusive=True
;;
- "landanl")
+ "snowanl")
# below lines are for creating JEDI YAML
case ${CASE} in
"C768")
@@ -255,10 +254,10 @@ case ${step} in
export layout_x
export layout_y
- export wtime_landanl="00:15:00"
- export npe_landanl=$(( layout_x * layout_y * 6 ))
- export nth_landanl=1
- export npe_node_landanl=$(( npe_node_max / nth_landanl ))
+ export wtime_snowanl="00:15:00"
+ export npe_snowanl=$(( layout_x * layout_y * 6 ))
+ export nth_snowanl=1
+ export npe_node_snowanl=$(( npe_node_max / nth_snowanl ))
;;
"aeroanlinit")
@@ -663,7 +662,7 @@ case ${step} in
declare -x "wtime_${step}_gfs"="03:00:00"
;;
"C384")
- declare -x "wtime_${step}"="00:10:00"
+ declare -x "wtime_${step}"="00:30:00"
declare -x "wtime_${step}_gfs"="06:00:00"
;;
"C768" | "C1152")
@@ -680,17 +679,12 @@ case ${step} in
unset NTASKS_TOT
;;
- "ocnpost")
- export wtime_ocnpost="00:30:00"
- export npe_ocnpost=1
- export npe_node_ocnpost=1
- export nth_ocnpost=1
- export memory_ocnpost="96G"
- if [[ ${machine} == "JET" ]]; then
- # JET only has 88GB of requestable memory per node
- # so a second node is required to meet the requiremtn
- npe_ocnpost=2
- fi
+ "oceanice_products")
+ export wtime_oceanice_products="00:15:00"
+ export npe_oceanice_products=1
+ export npe_node_oceanice_products=1
+ export nth_oceanice_products=1
+ export memory_oceanice_products="96GB"
;;
"upp")
@@ -700,6 +694,7 @@ case ${step} in
;;
"C192" | "C384" | "C768")
export npe_upp=120
+ export memory_upp="48GB"
;;
*)
echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}"
@@ -770,7 +765,7 @@ case ${step} in
export npe_genesis=1
export nth_genesis=1
export npe_node_genesis=1
- export memory_genesis="4G"
+ export memory_genesis="10G"
;;
"genesis_fsu")
@@ -778,7 +773,7 @@ case ${step} in
export npe_genesis_fsu=1
export nth_genesis_fsu=1
export npe_node_genesis_fsu=1
- export memory_genesis_fsu="4G"
+ export memory_genesis_fsu="10G"
;;
"fit2obs")
@@ -861,9 +856,8 @@ case ${step} in
;;
"atmensanlinit")
- # make below case dependent later
- export layout_x=1
- export layout_y=1
+ export layout_x=${layout_x_atmensanl}
+ export layout_y=${layout_y_atmensanl}
export wtime_atmensanlinit="00:10:00"
export npe_atmensanlinit=1
@@ -873,9 +867,8 @@ case ${step} in
;;
"atmensanlrun")
- # make below case dependent later
- export layout_x=1
- export layout_y=1
+ export layout_x=${layout_x_atmensanl}
+ export layout_y=${layout_y_atmensanl}
export wtime_atmensanlrun="00:30:00"
export npe_atmensanlrun=$(( layout_x * layout_y * 6 ))
@@ -883,6 +876,7 @@ case ${step} in
export nth_atmensanlrun=1
export nth_atmensanlrun_gfs=${nth_atmensanlrun}
export npe_node_atmensanlrun=$(( npe_node_max / nth_atmensanlrun ))
+ export memory_atmensanlrun="96GB"
export is_exclusive=True
;;
diff --git a/parm/config/gfs/config.sfcanl b/parm/config/gfs/config.sfcanl
index 9592fb77c9..e2fde8992a 100644
--- a/parm/config/gfs/config.sfcanl
+++ b/parm/config/gfs/config.sfcanl
@@ -8,4 +8,9 @@ echo "BEGIN: config.sfcanl"
# Get task specific resources
. $EXPDIR/config.resources sfcanl
+# Turn off NST in JEDIATMVAR
+if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then
+ export DONST="NO"
+fi
+
echo "END: config.sfcanl"
diff --git a/parm/config/gfs/config.landanl b/parm/config/gfs/config.snowanl
similarity index 52%
rename from parm/config/gfs/config.landanl
rename to parm/config/gfs/config.snowanl
index 70ebae7529..3303ce402b 100644
--- a/parm/config/gfs/config.landanl
+++ b/parm/config/gfs/config.snowanl
@@ -1,24 +1,24 @@
#! /usr/bin/env bash
-########## config.landanl ##########
-# configuration common to land analysis tasks
+########## config.snowanl ##########
+# configuration common to snow analysis tasks
-echo "BEGIN: config.landanl"
+echo "BEGIN: config.snowanl"
# Get task specific resources
-. "${EXPDIR}/config.resources" landanl
+. "${EXPDIR}/config.resources" snowanl
-obs_list_name=gdas_land_gts_only.yaml
+obs_list_name=gdas_snow_gts_only.yaml
if [[ "${cyc}" = "18" ]]; then
- obs_list_name=gdas_land_prototype.yaml
+ obs_list_name=gdas_snow_prototype.yaml
fi
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name}
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/snow/obs/config/
+export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/snow/obs/lists/${obs_list_name}
# Name of the JEDI executable and its yaml template
export JEDIEXE="${HOMEgfs}/exec/fv3jedi_letkf.x"
-export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml"
+export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/snow/letkfoi/letkfoi.yaml"
# Ensemble member properties
export SNOWDEPTHVAR="snodl"
@@ -26,9 +26,9 @@ export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI
# Name of the executable that applies increment to bkg and its namelist template
export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe"
-export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/apply_incr_nml.j2"
+export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/snow/letkfoi/apply_incr_nml.j2"
export io_layout_x=@IO_LAYOUT_X@
export io_layout_y=@IO_LAYOUT_Y@
-echo "END: config.landanl"
+echo "END: config.snowanl"
diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic
index 7f3956af4d..63d0e4a5cf 100644
--- a/parm/config/gfs/config.stage_ic
+++ b/parm/config/gfs/config.stage_ic
@@ -8,7 +8,7 @@ echo "BEGIN: config.stage_ic"
source "${EXPDIR}/config.resources" stage_ic
case "${CASE}" in
- "C48" | "C96")
+ "C48" | "C96" | "C192")
export CPL_ATMIC="workflow_${CASE}_refactored"
export CPL_ICEIC="workflow_${CASE}_refactored"
export CPL_OCNIC="workflow_${CASE}_refactored"
@@ -21,16 +21,16 @@ case "${CASE}" in
export CPL_WAVIC=workflow_C384_refactored
;;
"C768")
- export CPL_ATMIC=HR2_refactored
- export CPL_ICEIC=HR1_refactored
- export CPL_OCNIC=HR1_refactored
- export CPL_WAVIC=HR1_refactored
+ export CPL_ATMIC=HR3C768
+ export CPL_ICEIC=HR3marine
+ export CPL_OCNIC=HR3marine
+ export CPL_WAVIC=HR3marine
;;
"C1152")
- export CPL_ATMIC=HR2_C1152_refactored
- export CPL_ICEIC=HR3_refactored
- export CPL_OCNIC=HR3_refactored
- export CPL_WAVIC=HR1_refactored
+ export CPL_ATMIC=HR3C1152
+ export CPL_ICEIC=HR3marine
+ export CPL_OCNIC=HR3marine
+ export CPL_WAVIC=HR3marine
;;
*)
echo "FATAL ERROR Unrecognized resolution: ${CASE}"
diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index c8ce216899..2e299fddf7 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -15,7 +15,7 @@ if (( $# <= 1 )); then
echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072"
echo "--mom6 500|100|025"
echo "--cice6 500|100|025"
- echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025"
+ echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16"
echo "--gocart"
exit 1
@@ -416,6 +416,14 @@ if [[ "${skip_ww3}" == "false" ]]; then
"mx025")
ntasks_ww3=80
;;
+ "uglo_100km")
+ ntasks_ww3=40
+ nthreads_ww3=1
+ ;;
+ "uglo_m1g16")
+ ntasks_ww3=1000
+ nthreads_ww3=1
+ ;;
*)
echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!"
exit 1
diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave
index acb4c518ba..8068ffcda6 100644
--- a/parm/config/gfs/config.wave
+++ b/parm/config/gfs/config.wave
@@ -7,11 +7,7 @@ echo "BEGIN: config.wave"
# Parameters that are common to all wave model steps
-# System and version
-export wave_sys_ver=v1.0.0
-
export EXECwave="${HOMEgfs}/exec"
-export FIXwave="${HOMEgfs}/fix/wave"
export PARMwave="${HOMEgfs}/parm/wave"
export USHwave="${HOMEgfs}/ush"
@@ -80,7 +76,19 @@ case "${waveGRD}" in
export wavepostGRD='glo_500'
export waveuoutpGRD=${waveGRD}
;;
- *)
+ "uglo_100km")
+ #unstructured 100km grid
+ export waveinterpGRD='glo_200'
+ export wavepostGRD=''
+ export waveuoutpGRD=${waveGRD}
+ ;;
+ "uglo_m1g16")
+ #unstructured m1v16 grid
+ export waveinterpGRD='glo_15mxt'
+ export wavepostGRD=''
+ export waveuoutpGRD=${waveGRD}
+ ;;
+ *)
echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting."
exit 1
;;
diff --git a/parm/config/gfs/config.wavepostbndpnt b/parm/config/gfs/config.wavepostbndpnt
index dfeddc79b2..412c5fb42a 100644
--- a/parm/config/gfs/config.wavepostbndpnt
+++ b/parm/config/gfs/config.wavepostbndpnt
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostbndpnt"
# Get task specific resources
-. $EXPDIR/config.resources wavepostbndpnt
+source "${EXPDIR}/config.resources" wavepostbndpnt
echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gfs/config.wavepostbndpntbll b/parm/config/gfs/config.wavepostbndpntbll
index bb7224cc70..6695ab0f84 100644
--- a/parm/config/gfs/config.wavepostbndpntbll
+++ b/parm/config/gfs/config.wavepostbndpntbll
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostbndpntbll"
# Get task specific resources
-. $EXPDIR/config.resources wavepostbndpntbll
+source "${EXPDIR}/config.resources" wavepostbndpntbll
echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gfs/config.wavepostpnt b/parm/config/gfs/config.wavepostpnt
index 8befb91760..e87237da82 100644
--- a/parm/config/gfs/config.wavepostpnt
+++ b/parm/config/gfs/config.wavepostpnt
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostpnt"
# Get task specific resources
-. $EXPDIR/config.resources wavepostpnt
+source "${EXPDIR}/config.resources" wavepostpnt
echo "END: config.wavepostpnt"
diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs
index 8e74aae069..b3c5902e3c 100644
--- a/parm/config/gfs/config.wavepostsbs
+++ b/parm/config/gfs/config.wavepostsbs
@@ -6,7 +6,7 @@
echo "BEGIN: config.wavepostsbs"
# Get task specific resources
-. $EXPDIR/config.resources wavepostsbs
+source "${EXPDIR}/config.resources" wavepostsbs
# Subgrid info for grib2 encoding
export WAV_SUBGRBSRC=""
diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml
index 10af47de07..e6108fff29 100644
--- a/parm/config/gfs/yaml/defaults.yaml
+++ b/parm/config/gfs/yaml/defaults.yaml
@@ -3,16 +3,20 @@ base:
DO_JEDIATMVAR: "NO"
DO_JEDIATMENS: "NO"
DO_JEDIOCNVAR: "NO"
- DO_JEDILANDDA: "NO"
+ DO_JEDISNOWDA: "NO"
DO_MERGENSST: "NO"
DO_GOES: "NO"
FHMAX_GFS: 120
atmanl:
+ LAYOUT_X_ATMANL: 8
+ LAYOUT_Y_ATMANL: 8
IO_LAYOUT_X: 1
IO_LAYOUT_Y: 1
atmensanl:
+ LAYOUT_X_ATMENSANL: 8
+ LAYOUT_Y_ATMENSANL: 8
IO_LAYOUT_X: 1
IO_LAYOUT_Y: 1
@@ -20,7 +24,7 @@ aeroanl:
IO_LAYOUT_X: 1
IO_LAYOUT_Y: 1
-landanl:
+snowanl:
IO_LAYOUT_X: 1
IO_LAYOUT_Y: 1
diff --git a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml
index 85a00c3c30..16cbeac6e7 100644
--- a/parm/gdas/aero_jedi_fix.yaml
+++ b/parm/gdas/aero_jedi_fix.yaml
@@ -1,11 +1,11 @@
mkdir:
- !ENV ${DATA}/fv3jedi
copy:
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/akbk$(npz).nc4
+- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/akbk$(npz).nc4
- !ENV ${DATA}/fv3jedi/akbk.nc4
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/fmsmpp.nml
+- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/fmsmpp.nml
- !ENV ${DATA}/fv3jedi/fmsmpp.nml
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/field_table_gfdl
+- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/field_table_gfdl
- !ENV ${DATA}/fv3jedi/field_table
- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml
- !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml
diff --git a/parm/gdas/land_jedi_fix.yaml b/parm/gdas/snow_jedi_fix.yaml
similarity index 100%
rename from parm/gdas/land_jedi_fix.yaml
rename to parm/gdas/snow_jedi_fix.yaml
diff --git a/parm/post/oceanice_products.yaml b/parm/post/oceanice_products.yaml
new file mode 100644
index 0000000000..44b4094c56
--- /dev/null
+++ b/parm/post/oceanice_products.yaml
@@ -0,0 +1,75 @@
+ocnicepost:
+ executable: "ocnicepost.x"
+ namelist:
+ debug: False
+ fix_data:
+ mkdir:
+ - "{{ DATA }}"
+ copy:
+ - ["{{ HOMEgfs }}/exec/ocnicepost.x", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/parm/post/ocnicepost.nml.jinja2", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/parm/post/{{ component }}.csv", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ {% for grid in product_grids %}
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"]
+ {% endfor %}
+
+nc2grib2:
+ script: "{{ HOMEgfs }}/ush/oceanice_nc2grib2.sh"
+
+ocean:
+ namelist:
+ ftype: "ocean"
+ maskvar: "temp"
+ sinvar: "sin_rot"
+ cosvar: "cos_rot"
+ angvar: ""
+ {% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %}
+ ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239, 308.7874, 373.9288, 467.3998, 593.87915, 757.1453, 959.97325, 1204.059, 1489.9735, 1817.1455, 2183.879, 2587.3995, 3023.9285, 3488.7875, 3976.524, 4481.0625]
+ {% elif model_grid == 'mx500' %}
+ ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239]
+ {% endif %}
+ subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+ data_in:
+ copy:
+ - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
+ data_out:
+ mkdir:
+ - "{{ COM_OCEAN_NETCDF }}"
+ {% for grid in product_grids %}
+ - "{{ COM_OCEAN_GRIB }}/{{ grid }}"
+ {% endfor %}
+ copy:
+ - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ {% for grid in product_grids %}
+ - ["{{ DATA }}/ocean.{{ grid }}.grib2", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+ - ["{{ DATA }}/ocean.{{ grid }}.grib2.idx", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+ {% endfor %}
+
+ice:
+ namelist:
+ ftype: "ice"
+ maskvar: "tmask"
+ sinvar: ""
+ cosvar: ""
+ angvar: "ANGLET"
+ subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'frzmlt_h', 'albsni_h', 'mlt_onset_h', 'frz_onset_h']
+ data_in:
+ copy:
+ - ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
+ data_out:
+ mkdir:
+ - "{{ COM_ICE_NETCDF }}"
+ {% for grid in product_grids %}
+ - "{{ COM_ICE_GRIB }}/{{ grid }}"
+ {% endfor %}
+ copy:
+ - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ {% for grid in product_grids %}
+ - ["{{ DATA }}/ice.{{ grid }}.grib2", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+ - ["{{ DATA }}/ice.{{ grid }}.grib2.idx", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+ {% endfor %}
diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml
index 7d901fe17b..8db691b49c 100644
--- a/parm/ufs/fix/gfs/atmos.fixed_files.yaml
+++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml
@@ -1,85 +1,85 @@
copy:
# Atmosphere mosaic file linked as the grid_spec file (atm only)
- - [$(FIXorog)/$(CASE)/$(CASE)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc]
# Atmosphere grid tile files
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile1.nc, $(DATA)/INPUT/]
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile2.nc, $(DATA)/INPUT/]
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile3.nc, $(DATA)/INPUT/]
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile4.nc, $(DATA)/INPUT/]
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile5.nc, $(DATA)/INPUT/]
- - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile6.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile1.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile2.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile3.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile4.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile5.nc, $(DATA)/INPUT/]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile6.nc, $(DATA)/INPUT/]
- # oro_data_ls and oro_data_ss files from FIXugwd
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc]
- - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc]
+ # oro_data_ls and oro_data_ss files from FIXgfs/ugwd
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc]
+ - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc]
# GWD??
- - [$(FIXugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc]
+ - [$(FIXgfs)/ugwd/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc]
# CO2 climatology
- - [$(FIXam)/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt]
- - [$(FIXam)/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt]
- - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt]
+ - [$(FIXgfs)/am/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt]
+ - [$(FIXgfs)/am/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt]
+ - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt]
- # FIXam files
- - [$(FIXam)/global_climaeropac_global.txt, $(DATA)/aerosol.dat]
- - [$(FIXam)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77]
- - [$(FIXam)/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77]
- - [$(FIXam)/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb]
- - [$(FIXam)/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb]
- - [$(FIXam)/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb]
- - [$(FIXam)/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb]
- - [$(FIXam)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb]
- - [$(FIXam)/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt]
- - [$(FIXam)/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt]
- - [$(FIXam)/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb]
- - [$(FIXam)/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb]
+ # FIXgfs/am files
+ - [$(FIXgfs)/am/global_climaeropac_global.txt, $(DATA)/aerosol.dat]
+ - [$(FIXgfs)/am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77]
+ - [$(FIXgfs)/am/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77]
+ - [$(FIXgfs)/am/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb]
+ - [$(FIXgfs)/am/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb]
+ - [$(FIXgfs)/am/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb]
+ - [$(FIXgfs)/am/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb]
+ - [$(FIXgfs)/am/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb]
+ - [$(FIXgfs)/am/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt]
+ - [$(FIXgfs)/am/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt]
+ - [$(FIXgfs)/am/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb]
+ - [$(FIXgfs)/am/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb]
# MERRA2 Aerosol Climatology
- - [$(FIXaer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc]
- - [$(FIXaer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc]
+ - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc]
# Optical depth
- - [$(FIXlut)/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat]
- - [$(FIXlut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat]
- - [$(FIXlut)/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat]
- - [$(FIXlut)/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat]
- - [$(FIXlut)/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat]
+ - [$(FIXgfs)/lut/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat]
+ - [$(FIXgfs)/lut/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat]
+ - [$(FIXgfs)/lut/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat]
+ - [$(FIXgfs)/lut/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat]
+ - [$(FIXgfs)/lut/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat]
# fd_ufs.yaml file
- [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml, $(DATA)/]
diff --git a/parm/ufs/fix/gfs/land.fixed_files.yaml b/parm/ufs/fix/gfs/land.fixed_files.yaml
index bb2d060963..8e4d221dbc 100644
--- a/parm/ufs/fix/gfs/land.fixed_files.yaml
+++ b/parm/ufs/fix/gfs/land.fixed_files.yaml
@@ -1,58 +1,58 @@
copy:
- # Files from FIXorog/C??/sfc
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile6.nc, $(DATA)/]
+ # Files from FIXgfs/orog/C??/sfc
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile6.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile1.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile2.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile3.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile4.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile5.nc, $(DATA)/]
- - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile6.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile1.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile2.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile3.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile4.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile5.nc, $(DATA)/]
+ - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile6.nc, $(DATA)/]
diff --git a/parm/ufs/fix/gfs/ocean.fixed_files.yaml b/parm/ufs/fix/gfs/ocean.fixed_files.yaml
index 1ca8ce7a68..4ef19bab0d 100644
--- a/parm/ufs/fix/gfs/ocean.fixed_files.yaml
+++ b/parm/ufs/fix/gfs/ocean.fixed_files.yaml
@@ -1,9 +1,9 @@
copy:
# Orography data tile files
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc]
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc]
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc]
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc]
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc]
- - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc]
+ - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc]
diff --git a/parm/ufs/fv3/diag_table b/parm/ufs/fv3/diag_table
index b972b3470c..47106cb294 100644
--- a/parm/ufs/fv3/diag_table
+++ b/parm/ufs/fv3/diag_table
@@ -1,7 +1,7 @@
"fv3_history", 0, "hours", 1, "hours", "time"
"fv3_history2d", 0, "hours", 1, "hours", "time"
-"ocn%4yr%2mo%2dy%2hr", 6, "hours", 1, "hours", "time", 6, "hours", "1901 1 1 0 0 0"
-"ocn_daily%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "1901 1 1 0 0 0"
+"ocn%4yr%2mo%2dy%2hr", @[FHOUT_OCNICE], "hours", 1, "hours", "time", @[FHOUT_OCNICE], "hours", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR] 0 0"
+"ocn_daily%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR] 0 0"
##############
# Ocean fields
diff --git a/parm/wave/at_10m_interp.inp.tmpl b/parm/wave/at_10m_interp.inp.tmpl
index b2a80081e1..6f4c1f7099 100755
--- a/parm/wave/at_10m_interp.inp.tmpl
+++ b/parm/wave/at_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
$ Total number of grids
2
$ Grid extensions
- 'gnh_10m'
+ 'uglo_m1g16'
'at_10m'
$
0
diff --git a/parm/wave/ep_10m_interp.inp.tmpl b/parm/wave/ep_10m_interp.inp.tmpl
index 0848854ccf..23cfd50c2e 100755
--- a/parm/wave/ep_10m_interp.inp.tmpl
+++ b/parm/wave/ep_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
$ Total number of grids
2
$ Grid extensions
- 'gnh_10m'
+ 'uglo_m1g16'
'ep_10m'
$
0
diff --git a/parm/wave/glo_15mxt_interp.inp.tmpl b/parm/wave/glo_15mxt_interp.inp.tmpl
index 74bc9eebf4..19e9dae684 100755
--- a/parm/wave/glo_15mxt_interp.inp.tmpl
+++ b/parm/wave/glo_15mxt_interp.inp.tmpl
@@ -3,11 +3,9 @@ $------------------------------------------------
$ Start Time DT NSteps
TIME DT NSTEPS
$ Total number of grids
- 4
+ 2
$ Grid extensions
- 'gnh_10m'
- 'aoc_9km'
- 'gsh_15m'
+ 'uglo_m1g16'
'glo_15mxt'
$
0
diff --git a/parm/wave/glo_200_interp.inp.tmpl b/parm/wave/glo_200_interp.inp.tmpl
new file mode 100755
index 0000000000..c238a6fe0b
--- /dev/null
+++ b/parm/wave/glo_200_interp.inp.tmpl
@@ -0,0 +1,12 @@
+$ Input file for interpolation of glo_200 Grid
+$------------------------------------------------
+$ Start Time DT NSteps
+ TIME DT NSTEPS
+$ Total number of grids
+ 2
+$ Grid extensions
+ 'uglo_100km'
+ 'glo_200'
+$
+ 0
+$
diff --git a/parm/wave/glo_30m_interp.inp.tmpl b/parm/wave/glo_30m_interp.inp.tmpl
index ea1baf7fc4..c62881202c 100755
--- a/parm/wave/glo_30m_interp.inp.tmpl
+++ b/parm/wave/glo_30m_interp.inp.tmpl
@@ -3,11 +3,9 @@ $------------------------------------------------
$ Start Time DT NSteps
TIME DT NSTEPS
$ Total number of grids
- 4
+ 2
$ Grid extensions
- 'gnh_10m'
- 'aoc_9km'
- 'gsh_15m'
+ 'uglo_m1g16'
'glo_30m'
$
0
diff --git a/parm/wave/wc_10m_interp.inp.tmpl b/parm/wave/wc_10m_interp.inp.tmpl
index abb51b4dfc..8338c91d0c 100755
--- a/parm/wave/wc_10m_interp.inp.tmpl
+++ b/parm/wave/wc_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
$ Total number of grids
2
$ Grid extensions
- 'gnh_10m'
+ 'uglo_m1g16'
'wc_10m'
$
0
diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh
index d48d58947e..40f702beb2 100755
--- a/scripts/exgdas_atmos_chgres_forenkf.sh
+++ b/scripts/exgdas_atmos_chgres_forenkf.sh
@@ -21,7 +21,6 @@ source "$HOMEgfs/ush/preamble.sh"
# Directories.
pwd=$(pwd)
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
# Base variables
CDATE=${CDATE:-"2001010100"}
@@ -59,7 +58,7 @@ SENDECF=${SENDECF:-"NO"}
SENDDBN=${SENDDBN:-"NO"}
# level info file
-SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt}
+SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
# forecast files
APREFIX=${APREFIX:-""}
@@ -129,7 +128,7 @@ if [ $DO_CALC_ANALYSIS == "YES" ]; then
$NLN $ATMF09ENS fcst.ensres.09
fi
export OMP_NUM_THREADS=$NTHREADS_CHGRES
- SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS_ENKF}.txt}
+ SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS_ENKF}.txt}
if [ $USE_CFP = "YES" ]; then
[[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh
diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh
index 94a23f2a85..83781bac5b 100755
--- a/scripts/exgdas_atmos_nawips.sh
+++ b/scripts/exgdas_atmos_nawips.sh
@@ -22,27 +22,27 @@ DATA_RUN=$DATA/$RUN2
mkdir -p $DATA_RUN
cd $DATA_RUN
-cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl
+cp ${HOMEgfs}/gempak/fix/g2varswmo2.tbl g2varswmo2.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File g2varswmo2.tbl file is missing."
exit $err
fi
-cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
+cp ${HOMEgfs}/gempak/fix/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File g2vcrdwmo2.tbl file is missing."
exit $err
fi
-cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl
+cp ${HOMEgfs}/gempak/fix/g2varsncep1.tbl g2varsncep1.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File g2varsncep1.tbl file is missing."
exit $err
fi
-cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl
+cp ${HOMEgfs}/gempak/fix/g2vcrdncep1.tbl g2vcrdncep1.tbl
export err=$?
if [[ $err -ne 0 ]] ; then
echo " File g2vcrdncep1.tbl file is missing."
diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh
index 50320ffba1..1ff01ccfa9 100755
--- a/scripts/exgdas_atmos_verfrad.sh
+++ b/scripts/exgdas_atmos_verfrad.sh
@@ -37,9 +37,9 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then
#------------------------------------------------------------------
# SATYPE is the list of expected satellite/instrument sources
- # in the radstat file. It should be stored in the $TANKverf
- # directory. If it isn't there then use the $FIXgdas copy. In all
- # cases write it back out to the radmon.$PDY directory. Add any
+ # in the radstat file. It should be stored in the $TANKverf
+ # directory. If it isn't there then use the gdas fix copy. In all
+ # cases write it back out to the radmon.$PDY directory. Add any
# new sources to the list before writing back out.
#------------------------------------------------------------------
diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh
index c20d1dec78..59021debaa 100755
--- a/scripts/exgdas_enkf_ecen.sh
+++ b/scripts/exgdas_enkf_ecen.sh
@@ -76,8 +76,6 @@ CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh}
export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle}
APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}}
NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}}
-export FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"}
export FHOUR=${FHOUR:-0}
export DELTSFC=${DELTSFC:-6}
@@ -241,7 +239,7 @@ if [ $RECENTER_ENKF = "YES" ]; then
$NLN $ATMANL_GSI atmanl_gsi
$NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres
- SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt}
+ SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
$NLN $CHGRESNC chgres.x
chgresnml=chgres_nc_gauss.nml
nmltitle=chgres
diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh
index 86ab9071a4..f240ae561d 100755
--- a/scripts/exgdas_enkf_post.sh
+++ b/scripts/exgdas_enkf_post.sh
@@ -34,7 +34,7 @@ SENDDBN=${SENDDBN:-"NO"}
# Fix files
LEVS=${LEVS:-64}
-HYBENSMOOTH=${HYBENSMOOTH:-$FIXgsi/global_hybens_smoothinfo.l${LEVS}.txt}
+HYBENSMOOTH=${HYBENSMOOTH:-${FIXgfs}/gsi/global_hybens_smoothinfo.l${LEVS}.txt}
# Executables.
GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x}
diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh
index 81d68fb9fe..085ab35351 100755
--- a/scripts/exgdas_enkf_sfc.sh
+++ b/scripts/exgdas_enkf_sfc.sh
@@ -54,8 +54,6 @@ CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh}
export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle}
APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}}
NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}}
-export FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"}
export FHOUR=${FHOUR:-0}
export DELTSFC=${DELTSFC:-6}
@@ -152,8 +150,8 @@ if [ $DOIAU = "YES" ]; then
"${DATA}/fnbgsi.${cmem}"
${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \
"${DATA}/fnbgso.${cmem}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}"
done
@@ -188,8 +186,8 @@ if [ $DOSFCANL_ENKF = "YES" ]; then
"${DATA}/fnbgsi.${cmem}"
${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \
"${DATA}/fnbgso.${cmem}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}"
done
diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh
index 1f11026ac4..60d2592e4d 100755
--- a/scripts/exgdas_enkf_update.sh
+++ b/scripts/exgdas_enkf_update.sh
@@ -105,14 +105,14 @@ else
fi
LATA_ENKF=${LATA_ENKF:-$LATB_ENKF}
LONA_ENKF=${LONA_ENKF:-$LONB_ENKF}
-SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt}
-SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt}
-CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt}
-OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt}
-SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt}
-HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS_ENKF}.txt}
-ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt}
-VLOCALEIG=${VLOCALEIG:-${FIXgsi}/vlocal_eig_l${LEVS_ENKF}.dat}
+SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt}
+SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt}
+CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt}
+OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt}
+SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt}
+HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS_ENKF}.txt}
+ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS_ENKF}.txt}
+VLOCALEIG=${VLOCALEIG:-${FIXgfs}/gsi/vlocal_eig_l${LEVS_ENKF}.dat}
ENKF_SUFFIX="s"
[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX=""
diff --git a/scripts/exgfs_atmos_gempak_meta.sh b/scripts/exgfs_atmos_gempak_meta.sh
index 04f4f1fc5c..ad5ef823fd 100755
--- a/scripts/exgfs_atmos_gempak_meta.sh
+++ b/scripts/exgfs_atmos_gempak_meta.sh
@@ -73,14 +73,14 @@ do
if [ $do_all -eq 1 ] ; then
do_all=0
- awk '{print $1}' $FIXgempak/gfs_meta > $DATA/tmpscript
+ awk '{print $1}' ${HOMEgfs}/gempak/fix/gfs_meta > $DATA/tmpscript
else
#
# Do not try to grep out 12, it will grab the 12 from 126.
# This will work as long as we don't need 12 fhr metafiles
#
if [ $fhr -ne 12 ] ; then
- grep $fhr $FIXgempak/gfs_meta |awk -F" [0-9]" '{print $1}' > $DATA/tmpscript
+ grep $fhr ${HOMEgfs}/gempak/fix/gfs_meta |awk -F" [0-9]" '{print $1}' > $DATA/tmpscript
fi
fi
diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh
index 583593fef8..137feec7c2 100755
--- a/scripts/exgfs_atmos_goes_nawips.sh
+++ b/scripts/exgfs_atmos_goes_nawips.sh
@@ -15,13 +15,13 @@ source "$HOMEgfs/ush/preamble.sh"
cd $DATA
-cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl
-cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
-cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl
-cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl
+cp ${HOMEgfs}/gempak/fix/g2varswmo2.tbl g2varswmo2.tbl
+cp ${HOMEgfs}/gempak/fix/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
+cp ${HOMEgfs}/gempak/fix/g2varsncep1.tbl g2varsncep1.tbl
+cp ${HOMEgfs}/gempak/fix/g2vcrdncep1.tbl g2vcrdncep1.tbl
#
-# NAGRIB_TABLE=$FIXgempak/nagrib.tbl
+# NAGRIB_TABLE=${HOMEgfs}/gempak/fix/nagrib.tbl
NAGRIB=$GEMEXE/nagrib2
#
diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh
index ebb509d392..d03ec9cef6 100755
--- a/scripts/exgfs_atmos_nawips.sh
+++ b/scripts/exgfs_atmos_nawips.sh
@@ -50,10 +50,10 @@ while (( fhcnt <= fend )) ; do
if mkdir "lock.${fhcnt}" ; then
cd "lock.${fhcnt}" || exit 1
- cp "${FIXgempak}/g2varswmo2.tbl" "g2varswmo2.tbl"
- cp "${FIXgempak}/g2vcrdwmo2.tbl" "g2vcrdwmo2.tbl"
- cp "${FIXgempak}/g2varsncep1.tbl" "g2varsncep1.tbl"
- cp "${FIXgempak}/g2vcrdncep1.tbl" "g2vcrdncep1.tbl"
+ cp "${HOMEgfs}/gempak/fix/g2varswmo2.tbl" "g2varswmo2.tbl"
+ cp "${HOMEgfs}/gempak/fix/g2vcrdwmo2.tbl" "g2vcrdwmo2.tbl"
+ cp "${HOMEgfs}/gempak/fix/g2varsncep1.tbl" "g2varsncep1.tbl"
+ cp "${HOMEgfs}/gempak/fix/g2vcrdncep1.tbl" "g2vcrdncep1.tbl"
fhr=$(printf "%03d" "${fhcnt}")
diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh
index ce903a2284..22da372a2a 100755
--- a/scripts/exgfs_wave_init.sh
+++ b/scripts/exgfs_wave_init.sh
@@ -94,16 +94,16 @@ source "${HOMEgfs}/ush/preamble.sh"
echo " Mod def file for ${grdID} not found in ${COM_WAVE_PREP}. Setting up to generate ..."
echo ' '
set_trace
- if [ -f $FIXwave/ww3_grid.inp.$grdID ]
+ if [ -f ${FIXgfs}/wave/ww3_grid.inp.$grdID ]
then
- cp $FIXwave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID
+ cp ${FIXgfs}/wave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID
fi
if [ -f ww3_grid.inp.$grdID ]
then
set +x
echo ' '
- echo " ww3_grid.inp.$grdID copied ($FIXwave/ww3_grid.inp.$grdID)."
+ echo " ww3_grid.inp.$grdID copied (${FIXgfs}/wave/ww3_grid.inp.$grdID)."
echo ' '
set_trace
else
@@ -118,6 +118,13 @@ source "${HOMEgfs}/ush/preamble.sh"
err=2;export err;${errchk}
fi
+
+ if [ -f ${FIXgfs}/wave/${grdID}.msh ]
+ then
+ cp "${FIXgfs}/wave/${grdID}.msh" "${grdID}.msh"
+ fi
+  # TODO: add an error check here confirming the mesh file exists when the grid is unstructured
+
[[ ! -d "${COM_WAVE_PREP}" ]] && mkdir -m 775 -p "${COM_WAVE_PREP}"
if [ ${CFP_MP:-"NO"} = "YES" ]; then
echo "$nmoddef $USHwave/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
@@ -166,7 +173,7 @@ source "${HOMEgfs}/ush/preamble.sh"
exit=$?
fi
- if [ "$exit" != '0' ]
+ if [[ "$exit" != '0' ]]
then
set +x
echo ' '
@@ -195,9 +202,9 @@ source "${HOMEgfs}/ush/preamble.sh"
echo '********************************************** '
echo '*** FATAL ERROR : NO MODEL DEFINITION FILE *** '
echo '********************************************** '
- echo " grdID = $grdID"
+ echo " grdID = ${grdID}"
echo ' '
- sed "s/^/$grdID.out : /g" $grdID.out
+ sed "s/^/${grdID}.out : /g" "${grdID}.out"
set_trace
err=3;export err;${errchk}
fi
diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh
index 63690ff1b0..9145a30a2f 100755
--- a/scripts/exgfs_wave_nawips.sh
+++ b/scripts/exgfs_wave_nawips.sh
@@ -24,7 +24,6 @@ export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3}
export maxtries=${maxtries:-720}
export cycle=${cycle:-t${cyc}z}
export GEMwave=${GEMwave:-${HOMEgfs}/gempak}
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
export DATA=${DATA:-${DATAROOT:?}/${jobid}}
if [ ! -d ${DATA} ];then
mkdir -p ${DATA}
diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh
index a7aa957564..c085c48f30 100755
--- a/scripts/exgfs_wave_post_pnt.sh
+++ b/scripts/exgfs_wave_post_pnt.sh
@@ -156,7 +156,11 @@ source "$HOMEgfs/ush/preamble.sh"
cp -f $PARMwave/wave_${NET}.buoys buoy.loc.temp
if [ "$DOBNDPNT_WAV" = YES ]; then
#only do boundary points
- sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc
+ sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc || {
+ echo "WARNING: No boundary points found in buoy file ${PARMwave}/wave_${NET}.buoys"
+ echo " Ending job without doing anything."
+ exit 0
+ }
else
#exclude boundary points
sed -n '/^\$.*/!p' buoy.loc.temp | grep -v IBP > buoy.loc
diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh
index 2e6cb2071b..bcad75a660 100755
--- a/scripts/exgfs_wave_prdgen_bulls.sh
+++ b/scripts/exgfs_wave_prdgen_bulls.sh
@@ -31,7 +31,6 @@ source "$HOMEgfs/ush/preamble.sh"
export DATA=${DATA:-${DATAROOT:?}/${job}.$$}
#export CODEwave=${CODEwave:-${PACKAGEROOT}/${NET}_code.${wave_code_ver}/${code_pkg}}
export EXECwave=${EXECwave:-$HOMEgfs/exec}
- export FIXwave=${FIXwave:-$HOMEgfs/fix}
export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave}
export USHwave=${USHwave:-$HOMEgfs/ush}
#export EXECcode=${EXECcode:-CODEwave/exec}
diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh
index b0cbc124ce..54efb639d6 100755
--- a/scripts/exgfs_wave_prdgen_gridded.sh
+++ b/scripts/exgfs_wave_prdgen_gridded.sh
@@ -31,7 +31,6 @@ source "$HOMEgfs/ush/preamble.sh"
export FHOUT_WAV=${FHOUT_WAV:-6} #from 72 to 180 inc=6
export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3}
export maxtries=720
- export FIXwave=${FIXwave:-$HOMEgfs/fix/wave}
export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave}
export USHwave=${USHwave:-$HOMEgfs/ush}
export cyc=${cyc:-00}
diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh
index be006c1c85..f6cb610559 100755
--- a/scripts/exgfs_wave_prep.sh
+++ b/scripts/exgfs_wave_prep.sh
@@ -23,7 +23,7 @@
# #
# Update log #
# Mar2007 HTolman - Added NCO note on resources on mist/dew #
-# Apr2007 HTolman - Renaming mod_def files in $FIX_wave. #
+# Apr2007 HTolman - Renaming mod_def files in ${FIXgfs}/wave. #
# Mar2011 AChawla - Migrating to a vertical structure #
# Nov2012 JHAlves - Transitioning to WCOSS #
# Apr2019 JHAlves - Transitioning to GEFS workflow #
diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
index 2f7e3be972..833b06bd98 100755
--- a/scripts/exglobal_archive.sh
+++ b/scripts/exglobal_archive.sh
@@ -182,12 +182,12 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
targrp_list="${targrp_list} gfswave"
fi
- if [ "${DO_OCN}" = "YES" ]; then
- targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00"
+ if [[ "${DO_OCN}" == "YES" ]]; then
+ targrp_list="${targrp_list} ocean_6hravg ocean_daily ocean_grib2 gfs_flux_1p00"
fi
- if [ "${DO_ICE}" = "YES" ]; then
- targrp_list="${targrp_list} ice"
+ if [[ "${DO_ICE}" == "YES" ]]; then
+ targrp_list="${targrp_list} ice_6hravg ice_grib2"
fi
# Aerosols
diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh
index cb3c6467a1..b12046f9e2 100755
--- a/scripts/exglobal_atmos_analysis.sh
+++ b/scripts/exglobal_atmos_analysis.sh
@@ -289,21 +289,21 @@ else
fi
# GSI Fix files
-BERROR=${BERROR:-${FIXgsi}/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77}
-SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt}
-SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt}
-RADCLOUDINFO=${RADCLOUDINFO:-${FIXgsi}/cloudy_radiance_info.txt}
-ATMSFILTER=${ATMSFILTER:-${FIXgsi}/atms_beamwidth.txt}
-ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt}
-CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt}
-vqcdat=${vqcdat:-${FIXgsi}/vqctp001.dat}
-INSITUINFO=${INSITUINFO:-${FIXgsi}/global_insituinfo.txt}
-OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt}
-PCPINFO=${PCPINFO:-${FIXgsi}/global_pcpinfo.txt}
-AEROINFO=${AEROINFO:-${FIXgsi}/global_aeroinfo.txt}
-SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt}
-HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS}.txt}
-OBERROR=${OBERROR:-${FIXgsi}/prepobs_errtable.global}
+BERROR=${BERROR:-${FIXgfs}/gsi/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77}
+SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt}
+SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt}
+RADCLOUDINFO=${RADCLOUDINFO:-${FIXgfs}/gsi/cloudy_radiance_info.txt}
+ATMSFILTER=${ATMSFILTER:-${FIXgfs}/gsi/atms_beamwidth.txt}
+ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS}.txt}
+CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt}
+vqcdat=${vqcdat:-${FIXgfs}/gsi/vqctp001.dat}
+INSITUINFO=${INSITUINFO:-${FIXgfs}/gsi/global_insituinfo.txt}
+OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt}
+PCPINFO=${PCPINFO:-${FIXgfs}/gsi/global_pcpinfo.txt}
+AEROINFO=${AEROINFO:-${FIXgfs}/gsi/global_aeroinfo.txt}
+SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt}
+HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS}.txt}
+OBERROR=${OBERROR:-${FIXgfs}/gsi/prepobs_errtable.global}
# GSI namelist
SETUP=${SETUP:-""}
@@ -381,8 +381,8 @@ ${NLN} ${OBERROR} errtable
#If using correlated error, link to the covariance files
if [ ${USE_CORRELATED_OBERRS} == "YES" ]; then
if grep -q "Rcov" ${ANAVINFO} ; then
- if ls ${FIXgsi}/Rcov* 1> /dev/null 2>&1; then
- ${NLN} ${FIXgsi}/Rcov* ${DATA}
+ if ls ${FIXgfs}/gsi/Rcov* 1> /dev/null 2>&1; then
+ ${NLN} ${FIXgfs}/gsi/Rcov* ${DATA}
echo "using correlated obs error"
else
echo "FATAL ERROR: Satellite error covariance files (Rcov) are missing."
diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh
index a2086aa927..a2228f0425 100755
--- a/scripts/exglobal_atmos_analysis_calc.sh
+++ b/scripts/exglobal_atmos_analysis_calc.sh
@@ -23,7 +23,6 @@ source "$HOMEgfs/ush/preamble.sh"
# Directories.
pwd=$(pwd)
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
# Base variables
CDUMP=${CDUMP:-"gdas"}
diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh
index 2997ac0d25..60869181f3 100755
--- a/scripts/exglobal_atmos_sfcanl.sh
+++ b/scripts/exglobal_atmos_sfcanl.sh
@@ -55,8 +55,6 @@ export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-'-2.'}
export CYCLVARS=${CYCLVARS:-""}
export FHOUR=${FHOUR:-0}
export DELTSFC=${DELTSFC:-6}
-export FIXam=${FIXam:-${HOMEgfs}/fix/am}
-export FIXorog=${FIXorog:-${HOMEgfs}/fix/orog}
# FV3 specific info (required for global_cycle)
export CASE=${CASE:-"C384"}
@@ -176,8 +174,8 @@ if [[ ${DOIAU} = "YES" ]]; then
"${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc"
${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
done
export APRUNCY=${APRUN_CYCLE}
@@ -190,8 +188,8 @@ fi
# Update surface restarts at middle of window
for n in $(seq 1 ${ntiles}); do
- if [[ ${DO_JEDILANDDA:-"NO"} = "YES" ]]; then
- ${NCP} "${COM_LAND_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \
+ if [[ ${DO_JEDISNOWDA:-"NO"} = "YES" ]]; then
+ ${NCP} "${COM_SNOW_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \
"${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc"
else
${NCP} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \
@@ -199,8 +197,8 @@ for n in $(seq 1 ${ntiles}); do
fi
${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
- ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
done
export APRUNCY=${APRUN_CYCLE}
diff --git a/scripts/exglobal_atmos_vminmon.sh b/scripts/exglobal_atmos_vminmon.sh
index a4453dcf1a..aac63f3fb3 100755
--- a/scripts/exglobal_atmos_vminmon.sh
+++ b/scripts/exglobal_atmos_vminmon.sh
@@ -44,15 +44,15 @@ if [[ -s ${gsistat} ]]; then
#------------------------------------------------------------------
# Run the child sccripts.
#------------------------------------------------------------------
- "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+ "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
rc_costs=$?
echo "rc_costs = ${rc_costs}"
- "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+ "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
rc_gnorms=$?
echo "rc_gnorms = ${rc_gnorms}"
- "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+ "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
rc_reduct=$?
echo "rc_reduct = ${rc_reduct}"
diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh
index eebc9e59c3..c548b827e5 100755
--- a/scripts/exglobal_forecast.sh
+++ b/scripts/exglobal_forecast.sh
@@ -38,19 +38,19 @@
## Restart files:
##
## Fix files:
-## 1. computing grid, $FIXorog/$CASE/${CASE}_grid.tile${n}.nc
-## 2. orography data, $FIXorog/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc
-## 3. mosaic data, $FIXorog/$CASE/${CASE}_mosaic.nc
-## 4. Global O3 data, $FIXam/${O3FORC}
-## 5. Global H2O data, $FIXam/${H2OFORC}
-## 6. Global solar constant data, $FIXam/global_solarconstant_noaa_an.txt
-## 7. Global surface emissivity, $FIXam/global_sfc_emissivity_idx.txt
-## 8. Global CO2 historical data, $FIXam/global_co2historicaldata_glob.txt
-## 8. Global CO2 monthly data, $FIXam/co2monthlycyc.txt
-## 10. Additional global CO2 data, $FIXam/fix_co2_proj/global_co2historicaldata
+## 1. computing grid, ${FIXgfs}/orog/$CASE/${CASE}_grid.tile${n}.nc
+## 2. orography data, ${FIXgfs}/orog/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc
+## 3. mosaic data, ${FIXgfs}/orog/$CASE/${CASE}_mosaic.nc
+## 4. Global O3 data, ${FIXgfs}/am/${O3FORC}
+## 5. Global H2O data, ${FIXgfs}/am/${H2OFORC}
+## 6. Global solar constant data, ${FIXgfs}/am/global_solarconstant_noaa_an.txt
+## 7. Global surface emissivity, ${FIXgfs}/am/global_sfc_emissivity_idx.txt
+## 8. Global CO2 historical data, ${FIXgfs}/am/global_co2historicaldata_glob.txt
+## 8. Global CO2 monthly data, ${FIXgfs}/am/co2monthlycyc.txt
+## 10. Additional global CO2 data, ${FIXgfs}/am/fix_co2_proj/global_co2historicaldata
## 11. Climatological aerosol global distribution
-## $FIXam/global_climaeropac_global.txt
-## 12. Monthly volcanic forcing $FIXam/global_volcanic_aerosols_YYYY-YYYY.txt
+## ${FIXgfs}/am/global_climaeropac_global.txt
+## 12. Monthly volcanic forcing ${FIXgfs}/am/global_volcanic_aerosols_YYYY-YYYY.txt
##
## Data output (location, name)
## If quilting=true and output grid is gaussian grid:
@@ -105,9 +105,11 @@ common_predet
echo "MAIN: Loading variables before determination of run type"
FV3_predet
+[[ ${cplflx} = .true. ]] && CMEPS_predet
[[ ${cplflx} = .true. ]] && MOM6_predet
[[ ${cplwav} = .true. ]] && WW3_predet
[[ ${cplice} = .true. ]] && CICE_predet
+[[ ${cplchm} = .true. ]] && GOCART_predet
echo "MAIN: Variables before determination of run type loaded"
echo "MAIN: Determining run type"
@@ -119,6 +121,7 @@ echo "MAIN: RUN Type Determined"
echo "MAIN: Post-determination set up of run type"
FV3_postdet
+[[ ${cplflx} = .true. ]] && CMEPS_postdet
[[ ${cplflx} = .true. ]] && MOM6_postdet
[[ ${cplwav} = .true. ]] && WW3_postdet
[[ ${cplice} = .true. ]] && CICE_postdet
@@ -154,6 +157,7 @@ ${ERRSCRIPT} || exit "${err}"
FV3_out
[[ ${cplflx} = .true. ]] && MOM6_out
+[[ ${cplflx} = .true. ]] && CMEPS_out
[[ ${cplwav} = .true. ]] && WW3_out
[[ ${cplice} = .true. ]] && CICE_out
[[ ${cplchm} = .true. ]] && GOCART_out
diff --git a/scripts/exglobal_oceanice_products.py b/scripts/exglobal_oceanice_products.py
new file mode 100755
index 0000000000..0f8e2e0d6d
--- /dev/null
+++ b/scripts/exglobal_oceanice_products.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+import os
+
+from wxflow import AttrDict, Logger, logit, cast_strdict_as_dtypedict
+from pygfs.task.oceanice_products import OceanIceProducts
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+ config = cast_strdict_as_dtypedict(os.environ)
+
+ # Instantiate the OceanIce object
+ oceanice = OceanIceProducts(config)
+
+ # Pull out all the configuration keys needed to run the rest of steps
+ keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET',
+ 'COM_OCEAN_HISTORY', 'COM_OCEAN_GRIB',
+ 'COM_ICE_HISTORY', 'COM_ICE_GRIB',
+ 'APRUN_OCNICEPOST',
+ 'component', 'forecast_hour', 'valid_datetime', 'avg_period',
+ 'model_grid', 'product_grids', 'oceanice_yaml']
+ oceanice_dict = AttrDict()
+ for key in keys:
+ oceanice_dict[key] = oceanice.task_config[key]
+
+ # Initialize the DATA/ directory; copy static data
+ oceanice.initialize(oceanice_dict)
+
+ for grid in oceanice_dict.product_grids:
+
+ logger.info(f"Processing {grid} grid")
+
+ # Configure DATA/ directory for execution; prepare namelist etc.
+ oceanice.configure(oceanice_dict, grid)
+
+ # Run the oceanice post executable to interpolate and create grib2 files
+ oceanice.execute(oceanice_dict, grid)
+
+ # Subset raw model data to create netCDF products
+ oceanice.subset(oceanice_dict)
+
+ # Copy processed output from execute and subset
+ oceanice.finalize(oceanice_dict)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_snow_obs.py
similarity index 59%
rename from scripts/exglobal_prep_land_obs.py
rename to scripts/exglobal_prep_snow_obs.py
index 3594771c8a..5107d9c935 100755
--- a/scripts/exglobal_prep_land_obs.py
+++ b/scripts/exglobal_prep_snow_obs.py
@@ -1,12 +1,12 @@
#!/usr/bin/env python3
-# exglobal_land_analysis_prepare.py
-# This script creates a LandAnalysis object
+# exglobal_prep_snow_obs.py
+# This script creates a SnowAnalysis object
# and runs the prepare_GTS and prepare_IMS method
# which perform the pre-processing for GTS and IMS data
import os
from wxflow import Logger, cast_strdict_as_dtypedict
-from pygfs.task.land_analysis import LandAnalysis
+from pygfs.task.snow_analysis import SnowAnalysis
# Initialize root logger
@@ -18,8 +18,8 @@
# Take configuration from environment and cast it as python dictionary
config = cast_strdict_as_dtypedict(os.environ)
- # Instantiate the land prepare task
- LandAnl = LandAnalysis(config)
- LandAnl.prepare_GTS()
- if f"{ LandAnl.runtime_config.cyc }" == '18':
- LandAnl.prepare_IMS()
+ # Instantiate the snow prepare task
+ SnowAnl = SnowAnalysis(config)
+ SnowAnl.prepare_GTS()
+ if f"{ SnowAnl.runtime_config.cyc }" == '18':
+ SnowAnl.prepare_IMS()
diff --git a/scripts/exglobal_land_analysis.py b/scripts/exglobal_snow_analysis.py
similarity index 66%
rename from scripts/exglobal_land_analysis.py
rename to scripts/exglobal_snow_analysis.py
index 70141475b0..fe050f5af5 100755
--- a/scripts/exglobal_land_analysis.py
+++ b/scripts/exglobal_snow_analysis.py
@@ -1,12 +1,12 @@
#!/usr/bin/env python3
-# exglobal_land_analysis.py
-# This script creates an LandAnalysis class
+# exglobal_snow_analysis.py
+# This script creates a SnowAnalysis class
# and runs the initialize, execute and finalize methods
-# for a global Land Snow Depth analysis
+# for a global Snow Depth analysis
import os
from wxflow import Logger, cast_strdict_as_dtypedict
-from pygfs.task.land_analysis import LandAnalysis
+from pygfs.task.snow_analysis import SnowAnalysis
# Initialize root logger
logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
@@ -17,8 +17,8 @@
# Take configuration from environment and cast it as python dictionary
config = cast_strdict_as_dtypedict(os.environ)
- # Instantiate the land analysis task
- anl = LandAnalysis(config)
+ # Instantiate the snow analysis task
+ anl = SnowAnalysis(config)
anl.initialize()
anl.execute()
anl.finalize()
diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh
deleted file mode 100755
index ab2c80043e..0000000000
--- a/scripts/run_reg2grb2.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-#requires grib_util module
-
-MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
-export mask_file="${MOM6REGRID}/fix/reg2grb2/mask.0p25x0p25.grb2"
-
-# offline testing:
-#export DATA=
-#export icefile=$DATA/DATA0p5/icer2012010106.01.2012010100_0p5x0p5.nc
-#export ocnfile=$DATA/DATA0p5/ocnr2012010106.01.2012010100_0p5x0p5.nc
-#export outfile=$DATA/DATA0p5/out/ocnh2012010106.01.2012010100.grb2
-#
-# workflow testing:
-export icefile="icer${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc"
-export ocnfile="ocnr${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc"
-export outfile="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2"
-export outfile0p5="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2"
-
-export mfcstcpl=${mfcstcpl:-1}
-export IGEN_OCNP=${IGEN_OCNP:-197}
-
-# PT This is the forecast date
-export year=${VDATE:0:4}
-export month=${VDATE:4:2}
-export day=${VDATE:6:2}
-export hour=${VDATE:8:2}
-
-# PT This is the initialization date
-export syear=${IDATE:0:4}
-export smonth=${IDATE:4:2}
-export sday=${IDATE:6:2}
-export shour=${IDATE:8:2}
-
-# PT Need to get this from above - could be 6 or 1 hour
-export hh_inc_ocn=6
-#
-# set for 1p0 lat-lon
-#export im=360
-#export jm=181
-# export km=40
-#export imo=360
-#export jmo=181
-#
-# set for 0p5 lat-lon
-#export im=720
-#export jm=361
-#export km=40
-#export imo=720
-#export jmo=361
-#
-# set for 0p25 lat-lon
-export im=1440
-export jm=721
-export imo=1440
-export jmo=721
-export km=40
-
-export flats=-90.
-export flatn=90.
-export flonw=0.0
-export flone=359.75
-
-ln -sf "${mask_file}" ./iceocnpost.g2
-${executable} > "reg2grb2.${VDATE}.${IDATE}.out"
-
-# interpolated from 0p25 to 0p5 grid
-grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0"
-${COPYGB2} -g "${grid2p05}" -i0 -x "${outfile}" "${outfile0p5}"
-
diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh
deleted file mode 100755
index 103e9a759e..0000000000
--- a/scripts/run_regrid.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-MOM6REGRID="${MOM6REGRID:-${HOMEgfs}}"
-export EXEC_DIR="${MOM6REGRID}/exec"
-export USH_DIR="${MOM6REGRID}/ush"
-export COMOUTocean="${COM_OCEAN_HISTORY}"
-export COMOUTice="${COM_ICE_HISTORY}"
-export IDATE="${IDATE}"
-export VDATE="${VDATE}"
-export ENSMEM="${ENSMEM}"
-export FHR="${fhr}"
-export DATA="${DATA}"
-export FIXreg2grb2="${FIXreg2grb2}"
-
-###### DO NOT MODIFY BELOW UNLESS YOU KNOW WHAT YOU ARE DOING #######
-#Need NCL module to be loaded:
-echo "${NCARG_ROOT}"
-export NCL="${NCARG_ROOT}/bin/ncl"
-
-ls -alrt
-
-${NCL} "${USH_DIR}/icepost.ncl"
-${NCL} "${USH_DIR}/ocnpost.ncl"
-#####################################################################
-
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index c337374428..261c245142 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -16,7 +16,7 @@ function _usage() {
Builds all of the global-workflow components by calling the individual build
scripts in sequence.
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v]
+Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v][-w]
-a UFS_app:
Build a specific UFS app instead of the default
-g:
@@ -29,6 +29,8 @@ Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v]
Build UFS-DA
-v:
Execute all build scripts with -v option to turn on verbose where supported
+ -w:
+ Use unstructured wave grid
EOF
exit 1
}
@@ -40,10 +42,11 @@ _build_ufs_opt=""
_build_ufsda="NO"
_build_gsi="NO"
_verbose_opt=""
+_wave_unst=""
_build_job_max=20
# Reset option counter in case this script is sourced
OPTIND=1
-while getopts ":a:ghj:uv" option; do
+while getopts ":a:ghj:uvw" option; do
case "${option}" in
a) _build_ufs_opt+="-a ${OPTARG} ";;
g) _build_gsi="YES" ;;
@@ -51,6 +54,7 @@ while getopts ":a:ghj:uv" option; do
j) _build_job_max="${OPTARG} ";;
u) _build_ufsda="YES" ;;
v) _verbose_opt="-v";;
+ w) _wave_unst="-w";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
_usage
@@ -113,23 +117,23 @@ declare -A build_opts
big_jobs=0
build_jobs["ufs"]=8
big_jobs=$((big_jobs+1))
-build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}"
+build_opts["ufs"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt}"
-build_jobs["upp"]=6 # The UPP is hardcoded to use 6 cores
+build_jobs["upp"]=2
build_opts["upp"]=""
-build_jobs["ufs_utils"]=3
+build_jobs["ufs_utils"]=2
build_opts["ufs_utils"]="${_verbose_opt}"
build_jobs["gfs_utils"]=1
build_opts["gfs_utils"]="${_verbose_opt}"
-build_jobs["ww3prepost"]=3
-build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}"
+build_jobs["ww3prepost"]=2
+build_opts["ww3prepost"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt}"
# Optional DA builds
if [[ "${_build_ufsda}" == "YES" ]]; then
- if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera.intel" && "${MACHINE_ID}" != "hercules" ]]; then
+ if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" ]]; then
echo "NOTE: The GDAS App is not supported on ${MACHINE_ID}. Disabling build."
else
build_jobs["gdas"]=8
@@ -142,7 +146,7 @@ if [[ "${_build_gsi}" == "YES" ]]; then
build_opts["gsi_enkf"]="${_verbose_opt}"
fi
if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then
- build_jobs["gsi_utils"]=2
+ build_jobs["gsi_utils"]=1
build_opts["gsi_utils"]="${_verbose_opt}"
if [[ "${MACHINE_ID}" == "hercules" ]]; then
echo "NOTE: The GSI Monitor is not supported on Hercules. Disabling build."
diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh
index 3e3f879f1a..24ee8c5f13 100755
--- a/sorc/build_ufs.sh
+++ b/sorc/build_ufs.sh
@@ -7,12 +7,13 @@ cwd=$(pwd)
APP="S2SWA"
CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: does the g-w need to build with all these CCPP_SUITES?
-while getopts ":da:j:v" option; do
+while getopts ":da:j:vw" option; do
case "${option}" in
d) BUILD_TYPE="DEBUG";;
a) APP="${OPTARG}";;
j) BUILD_JOBS="${OPTARG}";;
v) export BUILD_VERBOSE="YES";;
+ w) PDLIB="ON";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
;;
@@ -28,6 +29,7 @@ source "./tests/detect_machine.sh"
source "./tests/module-setup.sh"
MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}"
+[[ ${PDLIB:-"OFF"} = "ON" ]] && MAKE_OPT+=" -DPDLIB=ON"
[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON"
COMPILE_NR=0
CLEAN_BEFORE=YES
diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh
index a55e96ebc8..8a2e1f6fcd 100755
--- a/sorc/build_upp.sh
+++ b/sorc/build_upp.sh
@@ -6,9 +6,10 @@ cd "${script_dir}" || exit 1
OPTIND=1
_opts=""
-while getopts ":dv" option; do
+while getopts ":dj:v" option; do
case "${option}" in
d) _opts+="-d ";;
+ j) export BUILD_JOBS="${OPTARG}" ;;
v) _opts+="-v ";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
@@ -27,4 +28,4 @@ fi
cd ufs_model.fd/FV3/upp/tests
# shellcheck disable=SC2086
-./compile_upp.sh ${_opts}
+BUILD_JOBS=${BUILD_JOBS:-8} ./compile_upp.sh ${_opts}
diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh
index 919afaacb3..19cdba98da 100755
--- a/sorc/build_ww3prepost.sh
+++ b/sorc/build_ww3prepost.sh
@@ -6,12 +6,15 @@ cd "${script_dir}" || exit 1
# Default settings
APP="S2SWA"
+PDLIB="OFF"
-while getopts ":j:a:v" option; do
+while getopts ":j:a:dvw" option; do
case "${option}" in
a) APP="${OPTARG}";;
+ d) BUILD_TYPE="DEBUG";;
j) BUILD_JOBS="${OPTARG}";;
v) export BUILD_VERBOSE="YES";;
+ w) PDLIB="ON";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
@@ -23,14 +26,16 @@ while getopts ":j:a:v" option; do
esac
done
-
# Determine which switch to use
-if [[ "${APP}" == "ATMW" ]]; then
+if [[ "${APP}" == "ATMW" ]]; then
ww3switch="model/esmf/switch"
-else
- ww3switch="model/bin/switch_meshcap"
-fi
-
+else
+ if [[ "${PDLIB}" == "ON" ]]; then
+ ww3switch="model/bin/switch_meshcap_pdlib"
+ else
+ ww3switch="model/bin/switch_meshcap"
+ fi
+fi
# Check final exec folder exists
if [[ ! -d "../exec" ]]; then
@@ -64,6 +69,8 @@ mkdir -p "${path_build}" || exit 1
cd "${path_build}" || exit 1
echo "Forcing a SHRD build"
+buildswitch="${path_build}/switch"
+
cat "${SWITCHFILE}" > "${path_build}/tempswitch"
sed -e "s/DIST/SHRD/g"\
@@ -73,15 +80,21 @@ sed -e "s/DIST/SHRD/g"\
-e "s/MPI / /g"\
-e "s/B4B / /g"\
-e "s/PDLIB / /g"\
+ -e "s/SCOTCH / /g"\
+ -e "s/METIS / /g"\
-e "s/NOGRB/NCEP2/g"\
"${path_build}/tempswitch" > "${path_build}/switch"
rm "${path_build}/tempswitch"
-echo "Switch file is ${path_build}/switch with switches:"
-cat "${path_build}/switch"
+echo "Switch file is ${buildswitch} with switches:"
+cat "${buildswitch}"
+
+#define cmake build options
+MAKE_OPT="-DCMAKE_INSTALL_PREFIX=install"
+[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON"
#Build executables:
-cmake "${WW3_DIR}" -DSWITCH="${path_build}/switch" -DCMAKE_INSTALL_PREFIX=install
+cmake "${WW3_DIR}" -DSWITCH="${buildswitch}" "${MAKE_OPT}"
rc=$?
if (( rc != 0 )); then
echo "Fatal error in cmake."
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 831b08a3f9..10614c9855 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 831b08a3f947e8d743e2afbd6d38ecc4b0dec3b1
+Subproject commit 10614c9855042b436bb8c37c7e2faeead01259cb
diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index 7d3b08e87c..4b7f6095d2 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit 7d3b08e87c07cfa54079442d245ac7e9ab1cd9f4
+Subproject commit 4b7f6095d260b7fcd9c99c337454e170f1aa7f2f
diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd
index c94bc72ff4..74ac594211 160000
--- a/sorc/gsi_enkf.fd
+++ b/sorc/gsi_enkf.fd
@@ -1 +1 @@
-Subproject commit c94bc72ff410b48c325abbfe92c9fcb601d89aed
+Subproject commit 74ac5942118d2a83ca84d3a629ec3aaffdb36fc5
diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd
index ae256c0d69..fb39e83880 160000
--- a/sorc/gsi_monitor.fd
+++ b/sorc/gsi_monitor.fd
@@ -1 +1 @@
-Subproject commit ae256c0d69df3232ee9dd3e81b176bf2c3cda312
+Subproject commit fb39e83880d44d433bed9af856bc5178bf63d64c
diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd
index 90481d9618..55abe58825 160000
--- a/sorc/gsi_utils.fd
+++ b/sorc/gsi_utils.fd
@@ -1 +1 @@
-Subproject commit 90481d961854e4412ecac49991721e6e63d4b82e
+Subproject commit 55abe588252ec6f39047d54a14727cf59f7f6688
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 0bbe80d942..be8606a6b5 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -107,7 +107,6 @@ for dir in aer \
lut \
mom6 \
orog \
- reg2grb2 \
sfc_climo \
ugwd \
verif \
@@ -135,16 +134,20 @@ for file in postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-G
postxconfig-NT-GFS-ANL.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt \
postxconfig-NT-GFS.txt postxconfig-NT-GFS-FLUX.txt postxconfig-NT-GFS-GOES.txt \
postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-TWO.txt \
- params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat
+ params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat
do
${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${file}" .
done
for file in optics_luts_DUST.dat optics_luts_DUST_nasa.dat optics_luts_NITR_nasa.dat \
optics_luts_SALT.dat optics_luts_SALT_nasa.dat optics_luts_SOOT.dat optics_luts_SOOT_nasa.dat \
- optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat
+ optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat
do
${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/chem/${file}" .
done
+for file in ice.csv ocean.csv ocnicepost.nml.jinja2
+do
+ ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/parm/ocnicepost/${file}" .
+done
cd "${HOMEgfs}/scripts" || exit 8
${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" .
@@ -152,7 +155,7 @@ cd "${HOMEgfs}/ush" || exit 8
for file in emcsfc_ice_blend.sh global_cycle_driver.sh emcsfc_snow.sh global_cycle.sh; do
${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/ush/${file}" .
done
-for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
+for file in make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/ush/${file}" .
done
@@ -187,7 +190,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
cd "${HOMEgfs}/fix" || exit 1
[[ ! -d gdas ]] && mkdir -p gdas
cd gdas || exit 1
- for gdas_sub in fv3jedi gsibec; do
+ for gdas_sub in fv3jedi gsibec obs; do
if [[ -d "${gdas_sub}" ]]; then
rm -rf "${gdas_sub}"
fi
@@ -243,7 +246,7 @@ cd "${HOMEgfs}/exec" || exit 1
for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \
syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x \
- mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x
+ mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x ocnicepost.x
do
[[ -s "${utilexe}" ]] && rm -f "${utilexe}"
${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/install/bin/${utilexe}" .
@@ -401,7 +404,6 @@ for prog in enkf_chgres_recenter_nc.fd \
mkgfsawps.fd \
overgridid.fd \
rdbfmsua.fd \
- reg2grb2.fd \
supvit.fd \
syndat_getjtbul.fd \
syndat_maksynrc.fd \
@@ -409,7 +411,8 @@ for prog in enkf_chgres_recenter_nc.fd \
tave.fd \
tocsbufr.fd \
vint.fd \
- webtitle.fd
+ webtitle.fd \
+ ocnicepost.fd
do
if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi
${LINK_OR_COPY} "gfs_utils.fd/src/${prog}" .
diff --git a/sorc/ncl.setup b/sorc/ncl.setup
deleted file mode 100644
index b4981689db..0000000000
--- a/sorc/ncl.setup
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-set +x
-case ${target} in
- 'jet'|'hera')
- module load ncl/6.5.0
- export NCARG_LIB=${NCARG_ROOT}/lib
- ;;
- *)
- echo "[${BASH_SOURCE[0]}]: unknown ${target}"
- ;;
-esac
diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd
index a837cfcb0c..6988662728 160000
--- a/sorc/ufs_model.fd
+++ b/sorc/ufs_model.fd
@@ -1 +1 @@
-Subproject commit a837cfcb0c79803e13a1375a362d3d0b14b62320
+Subproject commit 698866272846e8c0f8f61ddb1b20d6463460cd63
diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd
index c3fb3bceaa..47956a96a7 160000
--- a/sorc/ufs_utils.fd
+++ b/sorc/ufs_utils.fd
@@ -1 +1 @@
-Subproject commit c3fb3bceaa1a324599597c9cf8a42d19af817254
+Subproject commit 47956a96a7dc14e33fbccbb7fe74422f9bf542bf
diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd
index c267780a12..bd1c8f62a1 160000
--- a/sorc/verif-global.fd
+++ b/sorc/verif-global.fd
@@ -1 +1 @@
-Subproject commit c267780a1255fa7db052c745cf9c78b7dc6a2695
+Subproject commit bd1c8f62a1878051e34ff7c6f6a4dd290381f1ef
diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py
index cf2dc8dc89..25bbae4bce 100755
--- a/ush/calcanl_gfs.py
+++ b/ush/calcanl_gfs.py
@@ -346,7 +346,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
ComOut = os.getenv('COM_ATMOS_ANALYSIS', './')
APrefix = os.getenv('APREFIX', '')
NThreads = os.getenv('NTHREADS_CHGRES', 1)
- FixDir = os.getenv('FIXam', './')
+ FixDir = os.path.join(os.getenv('FIXgfs', './'), 'am')
atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean')
RunDir = os.getenv('DATA', './')
ExecCMD = os.getenv('APRUN_CALCANL', '')
diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index e1a2a49a7e..198df6505f 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -8,7 +8,7 @@
## This script is a definition of functions.
#####
-# For all non-evironment variables
+# For all non-environment variables
# Cycling and forecast hour specific parameters
FV3_det(){
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 8e40d6c881..644a2180da 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -133,17 +133,15 @@ EOF
#--------------------------------------------------------------------------
# Grid and orography data
- FIXsfc=${FIXsfc:-"${FIXorog}/${CASE}/sfc"}
-
if [[ ${cplflx} = ".false." ]] ; then
- ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc"
else
- ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc"
fi
for n in $(seq 1 "${ntiles}"); do
- ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/INPUT/oro_data.tile${n}.nc"
- ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/INPUT/${CASE}_grid.tile${n}.nc"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/INPUT/oro_data.tile${n}.nc"
+ ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/INPUT/${CASE}_grid.tile${n}.nc"
done
_suite_file="${HOMEgfs}/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml"
@@ -201,10 +199,10 @@ EOF
fi
# Files for GWD
- ${NLN} "${FIXugwd}/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc"
+ ${NLN} "${FIXgfs}/ugwd/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc"
for n in $(seq 1 "${ntiles}"); do
- ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ls.tile${n}.nc" "${DATA}/INPUT/oro_data_ls.tile${n}.nc"
- ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ss.tile${n}.nc" "${DATA}/INPUT/oro_data_ss.tile${n}.nc"
+ ${NLN} "${FIXgfs}/ugwd/${CASE}/${CASE}_oro_data_ls.tile${n}.nc" "${DATA}/INPUT/oro_data_ls.tile${n}.nc"
+ ${NLN} "${FIXgfs}/ugwd/${CASE}/${CASE}_oro_data_ss.tile${n}.nc" "${DATA}/INPUT/oro_data_ss.tile${n}.nc"
done
# GFS standard input data
@@ -225,51 +223,51 @@ EOF
# imp_physics should be 8:
####
if [[ ${imp_physics} -eq 8 ]]; then
- ${NLN} "${FIXam}/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN"
- ${NLN} "${FIXam}/freezeH2O.dat" "${DATA}/freezeH2O.dat"
- ${NLN} "${FIXam}/qr_acr_qgV2.dat" "${DATA}/qr_acr_qgV2.dat"
- ${NLN} "${FIXam}/qr_acr_qsV2.dat" "${DATA}/qr_acr_qsV2.dat"
+ ${NLN} "${FIXgfs}/am/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN"
+ ${NLN} "${FIXgfs}/am/freezeH2O.dat" "${DATA}/freezeH2O.dat"
+ ${NLN} "${FIXgfs}/am/qr_acr_qgV2.dat" "${DATA}/qr_acr_qgV2.dat"
+ ${NLN} "${FIXgfs}/am/qr_acr_qsV2.dat" "${DATA}/qr_acr_qsV2.dat"
fi
- ${NLN} "${FIXam}/${O3FORC}" "${DATA}/global_o3prdlos.f77"
- ${NLN} "${FIXam}/${H2OFORC}" "${DATA}/global_h2oprdlos.f77"
- ${NLN} "${FIXam}/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt"
- ${NLN} "${FIXam}/global_sfc_emissivity_idx.txt" "${DATA}/sfc_emissivity_idx.txt"
+ ${NLN} "${FIXgfs}/am/${O3FORC}" "${DATA}/global_o3prdlos.f77"
+ ${NLN} "${FIXgfs}/am/${H2OFORC}" "${DATA}/global_h2oprdlos.f77"
+ ${NLN} "${FIXgfs}/am/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt"
+ ${NLN} "${FIXgfs}/am/global_sfc_emissivity_idx.txt" "${DATA}/sfc_emissivity_idx.txt"
## merra2 aerosol climo
if [[ ${IAER} -eq "1011" ]]; then
for month in $(seq 1 12); do
MM=$(printf %02d "${month}")
- ${NLN} "${FIXaer}/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc"
+ ${NLN} "${FIXgfs}/aer/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc"
done
- ${NLN} "${FIXlut}/optics_BC.v1_3.dat" "${DATA}/optics_BC.dat"
- ${NLN} "${FIXlut}/optics_OC.v1_3.dat" "${DATA}/optics_OC.dat"
- ${NLN} "${FIXlut}/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat"
- ${NLN} "${FIXlut}/optics_SS.v3_3.dat" "${DATA}/optics_SS.dat"
- ${NLN} "${FIXlut}/optics_SU.v1_3.dat" "${DATA}/optics_SU.dat"
+ ${NLN} "${FIXgfs}/lut/optics_BC.v1_3.dat" "${DATA}/optics_BC.dat"
+ ${NLN} "${FIXgfs}/lut/optics_OC.v1_3.dat" "${DATA}/optics_OC.dat"
+ ${NLN} "${FIXgfs}/lut/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat"
+ ${NLN} "${FIXgfs}/lut/optics_SS.v3_3.dat" "${DATA}/optics_SS.dat"
+ ${NLN} "${FIXgfs}/lut/optics_SU.v1_3.dat" "${DATA}/optics_SU.dat"
fi
- ${NLN} "${FIXam}/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt"
- ${NLN} "${FIXam}/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt"
+ ${NLN} "${FIXgfs}/am/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt"
+ ${NLN} "${FIXgfs}/am/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt"
if [[ ${ICO2} -gt 0 ]]; then
- for file in $(ls "${FIXam}/fix_co2_proj/global_co2historicaldata"*) ; do
+ for file in $(ls "${FIXgfs}/am/fix_co2_proj/global_co2historicaldata"*) ; do
${NLN} "${file}" "${DATA}/$(basename "${file//global_}")"
done
fi
- ${NLN} "${FIXam}/global_climaeropac_global.txt" "${DATA}/aerosol.dat"
+ ${NLN} "${FIXgfs}/am/global_climaeropac_global.txt" "${DATA}/aerosol.dat"
if [[ ${IAER} -gt 0 ]] ; then
- for file in $(ls "${FIXam}/global_volcanic_aerosols"*) ; do
+ for file in $(ls "${FIXgfs}/am/global_volcanic_aerosols"*) ; do
${NLN} "${file}" "${DATA}/$(basename "${file//global_}")"
done
fi
# inline post fix files
if [[ ${WRITE_DOPOST} = ".true." ]]; then
- ${NLN} "${PARM_POST}/post_tag_gfs${LEVS}" "${DATA}/itag"
- ${NLN} "${FLTFILEGFS:-${PARM_POST}/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt"
- ${NLN} "${FLTFILEGFSF00:-${PARM_POST}/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt"
- ${NLN} "${POSTGRB2TBL:-${PARM_POST}/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new"
+ ${NLN} "${PARMgfs}/post/post_tag_gfs${LEVS}" "${DATA}/itag"
+ ${NLN} "${FLTFILEGFS:-${PARMgfs}/post/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt"
+ ${NLN} "${FLTFILEGFSF00:-${PARMgfs}/post/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt"
+ ${NLN} "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new"
fi
#------------------------------------------------------------------
@@ -296,28 +294,28 @@ EOF
LATB_JMO=${LATB_JMO:-${LATB_CASE}}
# Fix files
- FNGLAC=${FNGLAC:-"${FIXam}/global_glacier.2x2.grb"}
- FNMXIC=${FNMXIC:-"${FIXam}/global_maxice.2x2.grb"}
- FNTSFC=${FNTSFC:-"${FIXam}/RTGSST.1982.2012.monthly.clim.grb"}
- FNSNOC=${FNSNOC:-"${FIXam}/global_snoclim.1.875.grb"}
+ FNGLAC=${FNGLAC:-"${FIXgfs}/am/global_glacier.2x2.grb"}
+ FNMXIC=${FNMXIC:-"${FIXgfs}/am/global_maxice.2x2.grb"}
+ FNTSFC=${FNTSFC:-"${FIXgfs}/am/RTGSST.1982.2012.monthly.clim.grb"}
+ FNSNOC=${FNSNOC:-"${FIXgfs}/am/global_snoclim.1.875.grb"}
FNZORC=${FNZORC:-"igbp"}
- FNAISC=${FNAISC:-"${FIXam}/IMS-NIC.blended.ice.monthly.clim.grb"}
- FNALBC2=${FNALBC2:-"${FIXsfc}/${CASE}.mx${OCNRES}.facsf.tileX.nc"}
- FNTG3C=${FNTG3C:-"${FIXsfc}/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"}
- FNVEGC=${FNVEGC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
- FNMSKH=${FNMSKH:-"${FIXam}/global_slmask.t1534.3072.1536.grb"}
- FNVMNC=${FNVMNC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
- FNVMXC=${FNVMXC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
- FNSLPC=${FNSLPC:-"${FIXsfc}/${CASE}.mx${OCNRES}.slope_type.tileX.nc"}
- FNALBC=${FNALBC:-"${FIXsfc}/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"}
- FNVETC=${FNVETC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"}
- FNSOTC=${FNSOTC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_type.tileX.nc"}
- FNSOCC=${FNSOCC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_color.tileX.nc"}
- FNABSC=${FNABSC:-"${FIXsfc}/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"}
- FNSMCC=${FNSMCC:-"${FIXam}/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"}
+ FNAISC=${FNAISC:-"${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb"}
+ FNALBC2=${FNALBC2:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc"}
+ FNTG3C=${FNTG3C:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"}
+ FNVEGC=${FNVEGC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+ FNMSKH=${FNMSKH:-"${FIXgfs}/am/global_slmask.t1534.3072.1536.grb"}
+ FNVMNC=${FNVMNC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+ FNVMXC=${FNVMXC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+ FNSLPC=${FNSLPC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc"}
+ FNALBC=${FNALBC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"}
+ FNVETC=${FNVETC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"}
+ FNSOTC=${FNSOTC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc"}
+ FNSOCC=${FNSOCC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_color.tileX.nc"}
+ FNABSC=${FNABSC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"}
+ FNSMCC=${FNSMCC:-"${FIXgfs}/am/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"}
# If the appropriate resolution fix file is not present, use the highest resolution available (T1534)
- [[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXam}/global_soilmgldas.statsgo.t1534.3072.1536.grb"
+ [[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXgfs}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb"
# NSST Options
# nstf_name contains the NSST related parameters
@@ -463,8 +461,6 @@ EOF
LONB_STP=${LONB_STP:-${LONB_CASE}}
LATB_STP=${LATB_STP:-${LATB_CASE}}
cd "${DATA}" || exit 1
- if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi
- if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi
if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then
for fhr in ${FV3_OUTPUT_FH}; do
local FH3=$(printf %03i "${fhr}")
@@ -503,7 +499,6 @@ FV3_out() {
# Copy FV3 restart files
if [[ ${RUN} =~ "gdas" ]]; then
cd "${DATA}/RESTART"
- mkdir -p "${COM_ATMOS_RESTART}"
local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
while [[ ${idate} -le ${forecast_end_cycle} ]]; do
for file in "${idate:0:8}.${idate:8:2}0000."*; do
@@ -544,7 +539,7 @@ WW3_postdet() {
if [[ "${MESH_WAV}" == "${MESH_OCN:-mesh.mx${OCNRES}.nc}" ]]; then
echo "Wave is on same mesh as ocean"
else
- ${NLN} "${FIXwave}/${MESH_WAV}" "${DATA}/"
+ ${NLN} "${FIXgfs}/wave/${MESH_WAV}" "${DATA}/"
fi
export wavprfx=${RUNwave}${WAV_MEMBER:-}
@@ -604,8 +599,6 @@ WW3_postdet() {
${NLN} "${wavcurfile}" "${DATA}/current.${WAVECUR_FID}"
fi
- if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi
-
# Link output files
cd "${DATA}"
if [[ ${waveMULTIGRID} = ".true." ]]; then
@@ -650,8 +643,8 @@ WW3_nml() {
echo "SUB ${FUNCNAME[0]}: Copying input files for WW3"
WAV_MOD_TAG=${RUN}wave${waveMEMB}
if [[ "${USE_WAV_RMP:-YES}" = "YES" ]]; then
- if (( $( ls -1 "${FIXwave}/rmp_src_to_dst_conserv_"* 2> /dev/null | wc -l) > 0 )); then
- for file in $(ls "${FIXwave}/rmp_src_to_dst_conserv_"*) ; do
+ if (( $( ls -1 "${FIXgfs}/wave/rmp_src_to_dst_conserv_"* 2> /dev/null | wc -l) > 0 )); then
+ for file in $(ls "${FIXgfs}/wave/rmp_src_to_dst_conserv_"*) ; do
${NLN} "${file}" "${DATA}/"
done
else
@@ -682,6 +675,7 @@ MOM6_postdet() {
${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc"
case ${OCNRES} in
"025")
+ local nn
for nn in $(seq 1 4); do
if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then
${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc"
@@ -700,10 +694,10 @@ MOM6_postdet() {
fi
# Copy MOM6 fixed files
- ${NCP} "${FIXmom}/${OCNRES}/"* "${DATA}/INPUT/"
+ ${NCP} "${FIXgfs}/mom6/${OCNRES}/"* "${DATA}/INPUT/" # TODO: These need to be explicit
# Copy coupled grid_spec
- spec_file="${FIXcpl}/a${CASE}o${OCNRES}/grid_spec.nc"
+ spec_file="${FIXgfs}/cpl/a${CASE}o${OCNRES}/grid_spec.nc"
if [[ -s ${spec_file} ]]; then
${NCP} "${spec_file}" "${DATA}/INPUT/"
else
@@ -711,27 +705,6 @@ MOM6_postdet() {
exit 3
fi
- # Copy mediator restart files to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function
- if [[ ${warm_start} = ".true." ]]; then
- local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
- if [[ -f "${mediator_file}" ]]; then
- ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc"
- rm -f "${DATA}/rpointer.cpl"
- touch "${DATA}/rpointer.cpl"
- echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
- else
- # We have a choice to make here.
- # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
- # cmeps_run_type is determined based on the availability of the mediator restart file
- echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!"
- #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!"
- #exit 4
- fi
- else
- # This is a cold start, so initialize the coupling fields from zero
- export cmeps_run_type="startup"
- fi
-
# If using stochatic parameterizations, create a seed that does not exceed the
# largest signed integer
if [[ "${DO_OCN_SPPT}" = "YES" ]] || [[ "${DO_OCN_PERT_EPBL}" = "YES" ]]; then
@@ -743,58 +716,53 @@ MOM6_postdet() {
fi
fi
- # Create COMOUTocean
- [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}"
-
# Link output files
if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
- # Link output files for RUN = gfs
-
- # TODO: get requirements on what files need to be written out and what these dates here are and what they mean
+ # Link output files for RUN = gfs|gefs
- if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi
+ # Looping over MOM6 output hours
+ local fhr fhr3 last_fhr interval midpoint vdate vdate_mid source_file dest_file
+ for fhr in ${MOM6_OUTPUT_FH}; do
+ fhr3=$(printf %03i "${fhr}")
- # Looping over FV3 output hours
- # TODO: Need to define MOM6_OUTPUT_FH and control at some point for issue #1629
- for fhr in ${FV3_OUTPUT_FH}; do
if [[ -z ${last_fhr:-} ]]; then
- local last_fhr=${fhr}
+ last_fhr=${fhr}
continue
fi
+
(( interval = fhr - last_fhr ))
(( midpoint = last_fhr + interval/2 ))
- local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- local vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
-
+ vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+ vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
# Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM
- local source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
- local dest_file="ocn${vdate}.${ENSMEM}.${current_cycle}.nc"
+ source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+ dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
- local source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc"
- local dest_file=${source_file}
- if [[ ! -a "${DATA}/${source_file}" ]]; then
+ # Daily output
+ if (( fhr > 0 & fhr % 24 == 0 )); then
+ source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc"
+ dest_file="${RUN}.ocean.t${cyc}z.daily.f${fhr3}.nc"
${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
fi
- local last_fhr=${fhr}
+ last_fhr=${fhr}
+
done
elif [[ "${RUN}" =~ "gdas" ]]; then
# Link output files for RUN = gdas
- # Save MOM6 backgrounds
- for fhr in ${FV3_OUTPUT_FH}; do
- local idatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
+ # Save (instantaneous) MOM6 backgrounds
+ for fhr in ${MOM6_OUTPUT_FH}; do
local fhr3=$(printf %03i "${fhr}")
- ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc"
+ local vdatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
+ ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.ocean.t${cyc}z.inst.f${fhr3}.nc" "${DATA}/ocn_da_${vdatestr}.nc"
done
fi
- mkdir -p "${COM_OCEAN_RESTART}"
-
# Link ocean restarts from DATA to COM
# Coarser than 1/2 degree has a single MOM restart
${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
@@ -809,10 +777,16 @@ MOM6_postdet() {
;;
esac
- # Loop over restart_interval frequency and link restarts from DATA to COM
- local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- while [[ ${idate} -lt ${forecast_end_cycle} ]]; do
- local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}")
+ if [[ "${RUN}" =~ "gdas" ]]; then
+ local interval idate
+ if [[ "${DOIAU}" = "YES" ]]; then
+ # Link restarts at the beginning of the next cycle from DATA to COM
+ interval=$(( assim_freq / 2 ))
+ idate=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${interval} hours" +%Y%m%d%H)
+ else
+ # Link restarts at the middle of the next cycle from DATA to COM
+ idate="${next_cycle}"
+ fi
${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
case ${OCNRES} in
"025")
@@ -821,23 +795,7 @@ MOM6_postdet() {
done
;;
esac
- local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- done
-
- # TODO: mediator should have its own CMEPS_postdet() function
- # Link mediator restarts from DATA to COM
- # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below:
- # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
- # Instead of linking, copy the mediator files after the model finishes
- #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med"
- #mkdir -p "${COMOUTmed}/RESTART"
- #local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- #while [[ ${idate} -le ${forecast_end_cycle} ]]; do
- # local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
- # local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
- # ${NLN} "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
- # local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- #done
+ fi
echo "SUB ${FUNCNAME[0]}: MOM6 input data linked/copied"
@@ -853,26 +811,8 @@ MOM6_out() {
echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6"
# Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull)
- if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi
${NCP} "${DATA}/INPUT/MOM_input" "${COM_CONF}/ufs.MOM_input"
- # TODO: mediator should have its own CMEPS_out() function
- # Copy mediator restarts from DATA to COM
- # Linking mediator restarts to COM causes the model to fail with a message.
- # See MOM6_postdet() function for error message
- mkdir -p "${COM_MED_RESTART}"
- local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- while [[ ${idate} -le ${forecast_end_cycle} ]]; do
- local seconds=$(to_seconds "${idate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
- local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
- local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
- if [[ -f ${mediator_file} ]]; then
- ${NCP} "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc"
- else
- echo "Mediator restart ${mediator_file} not found."
- fi
- local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- done
}
CICE_postdet() {
@@ -891,58 +831,44 @@ CICE_postdet() {
echo "${DATA}/cice_model.res.nc" > "${DATA}/ice.restart_file"
echo "Link CICE fixed files"
- ${NLN} "${FIXcice}/${ICERES}/${CICE_GRID}" "${DATA}/"
- ${NLN} "${FIXcice}/${ICERES}/${CICE_MASK}" "${DATA}/"
- ${NLN} "${FIXcice}/${ICERES}/${MESH_ICE}" "${DATA}/"
-
- # Link CICE output files
- if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi
- mkdir -p "${COM_ICE_RESTART}"
-
- if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
- # Link output files for RUN = gfs
-
- # TODO: make these forecast output files consistent w/ GFS output
- # TODO: Work w/ NB to determine appropriate naming convention for these files
-
- # TODO: consult w/ NB on how to improve on this. Gather requirements and more information on what these files are and how they are used to properly catalog them
- local vdate seconds vdatestr fhr last_fhr
- for fhr in ${FV3_OUTPUT_FH}; do
- vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
-
- if [[ 10#${fhr} -eq 0 ]]; then
- ${NLN} "${COM_ICE_HISTORY}/iceic${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
- else
- (( interval = fhr - last_fhr )) # Umm.. isn't this CICE_HISTFREQ_N in hours (currently set to FHOUT)?
- ${NLN} "${COM_ICE_HISTORY}/ice${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc"
- fi
+ ${NLN} "${FIXgfs}/cice/${ICERES}/${CICE_GRID}" "${DATA}/"
+ ${NLN} "${FIXgfs}/cice/${ICERES}/${CICE_MASK}" "${DATA}/"
+ ${NLN} "${FIXgfs}/cice/${ICERES}/${MESH_ICE}" "${DATA}/"
+
+ # Link iceh_ic file to COM. This is the initial condition file from CICE (f000)
+ # TODO: Is this file needed in COM? Is this going to be used for generating any products?
+ local vdate seconds vdatestr fhr fhr3 interval last_fhr
+ seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds
+ vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}"
+ ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
+
+ # Link CICE forecast output files from DATA/CICE_OUTPUT to COM
+ local source_file dest_file
+ for fhr in ${CICE_OUTPUT_FH}; do
+ fhr3=$(printf %03i "${fhr}")
+
+ if [[ -z ${last_fhr:-} ]]; then
last_fhr=${fhr}
- done
+ continue
+ fi
- elif [[ "${RUN}" =~ "gdas" ]]; then
+ (( interval = fhr - last_fhr ))
- # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice
- # This can be thought of as the f000 output from the CICE model
- local seconds vdatestr
- seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}"
- ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
-
- # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice
- local vdate vdatestr seconds fhr fhr3
- fhr="${FHOUT}"
- while [[ "${fhr}" -le "${FHMAX}" ]]; do
- vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
- fhr3=$(printf %03i "${fhr}")
- ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${vdatestr}.nc"
- fhr=$((fhr + FHOUT))
- done
+ vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+ seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
+ vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
- fi
+ if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
+ source_file="iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc"
+ dest_file="${RUN}.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
+ elif [[ "${RUN}" =~ "gdas" ]]; then
+ source_file="iceh_inst.${vdatestr}.nc"
+ dest_file="${RUN}.ice.t${cyc}z.inst.f${fhr3}.nc"
+ fi
+ ${NLN} "${COM_ICE_HISTORY}/${dest_file}" "${DATA}/CICE_OUTPUT/${source_file}"
+
+ last_fhr=${fhr}
+ done
# Link CICE restarts from CICE_RESTART to COMOUTice/RESTART
# Loop over restart_interval and link restarts from DATA to COM
@@ -966,7 +892,6 @@ CICE_out() {
echo "SUB ${FUNCNAME[0]}: Copying output data for CICE"
# Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successfull)
- if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi
${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in"
}
@@ -1004,8 +929,6 @@ GOCART_rc() {
GOCART_postdet() {
echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART"
- if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi
-
for fhr in ${FV3_OUTPUT_FH}; do
local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
@@ -1033,6 +956,56 @@ GOCART_out() {
${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
"${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
done
+}
+
+CMEPS_postdet() {
+ echo "SUB ${FUNCNAME[0]}: Linking output data for CMEPS mediator"
+
+ # Copy mediator restart files to RUNDIR
+ if [[ "${warm_start}" = ".true." ]]; then
+ local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
+ if [[ -f "${mediator_file}" ]]; then
+ ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc"
+ rm -f "${DATA}/rpointer.cpl"
+ touch "${DATA}/rpointer.cpl"
+ echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
+ else
+ # We have a choice to make here.
+ # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
+ # cmeps_run_type is determined based on the availability of the mediator restart file
+ echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!"
+ #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!"
+ #exit 4
+ fi
+ fi
+ # Link mediator restarts from DATA to COM
+ # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below:
+ # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
+ # Instead of linking, copy the mediator files after the model finishes. See CMEPS_out() below.
+ #local rdate rdatestr seconds mediator_file
+ #rdate=${forecast_end_cycle}
+ #seconds=$(to_seconds "${rdate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
+ #rdatestr="${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}"
+ #${NLN} "${COM_MED_RESTART}/${rdate:0:8}.${rdate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/CMEPS_RESTART/ufs.cpld.cpl.r.${rdatestr}.nc"
+
+}
+
+CMEPS_out() {
+ echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator"
+
+ # Linking mediator restarts to COM causes the model to fail with a message.
+ # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
+ # Copy mediator restarts from DATA to COM
+ local rdate rdatestr seconds mediator_file
+ rdate=${forecast_end_cycle}
+ seconds=$(to_seconds "${rdate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
+ rdatestr="${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}"
+ mediator_file="${DATA}/CMEPS_RESTART/ufs.cpld.cpl.r.${rdatestr}.nc"
+ if [[ -f ${mediator_file} ]]; then
+ ${NCP} "${mediator_file}" "${COM_MED_RESTART}/${rdate:0:8}.${rdate:8:2}0000.ufs.cpld.cpl.r.nc"
+ else
+ echo "Mediator restart ${mediator_file} not found."
+ fi
}
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 8f46ed6ea0..ab02270b46 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -8,9 +8,6 @@
## This script is a definition of functions.
#####
-# For all non-evironment variables
-# Cycling and forecast hour specific parameters
-
to_seconds() {
# Function to convert HHMMSS to seconds since 00Z
local hhmmss=${1:?}
@@ -50,21 +47,12 @@ common_predet(){
# shellcheck disable=SC2034
pwd=$(pwd)
CDUMP=${CDUMP:-gdas}
- CASE=${CASE:-C768}
- CDATE=${CDATE:-2017032500}
+ CASE=${CASE:-C96}
+ CDATE=${CDATE:-"${PDY}${cyc}"}
ENSMEM=${ENSMEM:-000}
- FCSTEXECDIR=${FCSTEXECDIR:-${HOMEgfs}/exec}
- FCSTEXEC=${FCSTEXEC:-ufs_model.x}
-
- # Directories.
- FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-
- # Model specific stuff
- PARM_POST=${PARM_POST:-${HOMEgfs}/parm/post}
-
# Define significant cycles
- current_cycle=${CDATE}
+ current_cycle="${PDY}${cyc}"
previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H)
# ignore errors that variable isn't used
# shellcheck disable=SC2034
@@ -88,23 +76,28 @@ common_predet(){
tcyc=${scyc}
fi
- mkdir -p "${COM_CONF}"
+ FHMIN=${FHMIN:-0}
+ FHMAX=${FHMAX:-9}
+ FHOUT=${FHOUT:-3}
+ FHMAX_HF=${FHMAX_HF:-0}
+ FHOUT_HF=${FHOUT_HF:-1}
+
+ # Several model components share DATA/INPUT for input data
+ if [[ ! -d "${DATA}/INPUT" ]]; then mkdir -p "${DATA}/INPUT"; fi
+
+ if [[ ! -d "${COM_CONF}" ]]; then mkdir -p "${COM_CONF}"; fi
cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 )
}
FV3_predet(){
echo "SUB ${FUNCNAME[0]}: Defining variables for FV3"
- FHMIN=${FHMIN:-0}
- FHMAX=${FHMAX:-9}
- FHOUT=${FHOUT:-3}
+
+ if [[ ! -d "${COM_ATMOS_HISTORY}" ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi
+ if [[ ! -d "${COM_ATMOS_MASTER}" ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi
+ if [[ ! -d "${COM_ATMOS_RESTART}" ]]; then mkdir -p "${COM_ATMOS_RESTART}"; fi
+
FHZER=${FHZER:-6}
FHCYC=${FHCYC:-24}
- FHMAX_HF=${FHMAX_HF:-0}
- FHOUT_HF=${FHOUT_HF:-1}
- NSOUT=${NSOUT:-"-1"}
- FDIAG=${FHOUT}
- if (( FHMAX_HF > 0 && FHOUT_HF > 0 )); then FDIAG=${FHOUT_HF}; fi
- WRITE_DOPOST=${WRITE_DOPOST:-".false."}
restart_interval=${restart_interval:-${FHMAX}}
# restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX
if [[ ${restart_interval} -eq 0 ]]; then
@@ -112,8 +105,8 @@ FV3_predet(){
fi
# Convert output settings into an explicit list for FV3
- # NOTE: FV3_OUTPUT_FH is also currently used in other components
- # TODO: Have a seperate control for other components to address issue #1629
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
FV3_OUTPUT_FH=""
local fhr=${FHMIN}
if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
@@ -122,12 +115,6 @@ FV3_predet(){
fi
FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
- # Model resolution specific parameters
- DELTIM=${DELTIM:-225}
- layout_x=${layout_x:-8}
- layout_y=${layout_y:-16}
- LEVS=${LEVS:-65}
-
# Other options
# ignore errors that variable isn't used
# shellcheck disable=SC2034
@@ -141,18 +128,8 @@ FV3_predet(){
# Model config options
ntiles=6
- TYPE=${TYPE:-"nh"} # choices: nh, hydro
- MONO=${MONO:-"non-mono"} # choices: mono, non-mono
-
- QUILTING=${QUILTING:-".true."}
- OUTPUT_GRID=${OUTPUT_GRID:-"gaussian_grid"}
- WRITE_NEMSIOFLIP=${WRITE_NEMSIOFLIP:-".true."}
- WRITE_FSYNCFLAG=${WRITE_FSYNCFLAG:-".true."}
-
rCDUMP=${rCDUMP:-${CDUMP}}
- mkdir -p "${DATA}/INPUT"
-
#------------------------------------------------------------------
# changeable parameters
# dycore definitions
@@ -210,8 +187,7 @@ FV3_predet(){
print_freq=${print_freq:-6}
#-------------------------------------------------------
- if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]]; then
- if [[ ! -d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi
+ if [[ "${RUN}" =~ "gfs" || "${RUN}" = "gefs" ]]; then
${NLN} "${COM_ATMOS_RESTART}" RESTART
# The final restart written at the end doesn't include the valid date
# Create links that keep the same name pattern for these files
@@ -225,26 +201,68 @@ FV3_predet(){
${NLN} "${file}" "${COM_ATMOS_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${file}"
done
else
- mkdir -p "${DATA}/RESTART"
+ if [[ ! -d "${DATA}/RESTART" ]]; then mkdir -p "${DATA}/RESTART"; fi
fi
- echo "SUB ${FUNCNAME[0]}: pre-determination variables set"
}
WW3_predet(){
echo "SUB ${FUNCNAME[0]}: WW3 before run type determination"
+
+ if [[ ! -d "${COM_WAVE_HISTORY}" ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi
if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi
+
${NLN} "${COM_WAVE_RESTART}" "restart_wave"
}
CICE_predet(){
echo "SUB ${FUNCNAME[0]}: CICE before run type determination"
+
+ if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi
+ if [[ ! -d "${COM_ICE_RESTART}" ]]; then mkdir -p "${COM_ICE_RESTART}"; fi
+ if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi
+
if [[ ! -d "${DATA}/CICE_OUTPUT" ]]; then mkdir -p "${DATA}/CICE_OUTPUT"; fi
if [[ ! -d "${DATA}/CICE_RESTART" ]]; then mkdir -p "${DATA}/CICE_RESTART"; fi
+
+ # CICE does not have a concept of high frequency output like FV3
+ # Convert output settings into an explicit list for CICE
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
+ CICE_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}")
+
}
MOM6_predet(){
echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination"
+
+ if [[ ! -d "${COM_OCEAN_HISTORY}" ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi
+ if [[ ! -d "${COM_OCEAN_RESTART}" ]]; then mkdir -p "${COM_OCEAN_RESTART}"; fi
+ if [[ ! -d "${COM_OCEAN_INPUT}" ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi
+
if [[ ! -d "${DATA}/MOM6_OUTPUT" ]]; then mkdir -p "${DATA}/MOM6_OUTPUT"; fi
if [[ ! -d "${DATA}/MOM6_RESTART" ]]; then mkdir -p "${DATA}/MOM6_RESTART"; fi
+
+ # MOM6 does not have a concept of high frequency output like FV3
+ # Convert output settings into an explicit list for MOM6
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
+ MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}")
+
+}
+
+CMEPS_predet(){
+ echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination"
+
+ if [[ ! -d "${COM_MED_RESTART}" ]]; then mkdir -p "${COM_MED_RESTART}"; fi
+
+ if [[ ! -d "${DATA}/CMEPS_RESTART" ]]; then mkdir -p "${DATA}/CMEPS_RESTART"; fi
+
+}
+
+GOCART_predet(){
+ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination"
+
+ if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi
+
}
diff --git a/ush/fv3gfs_remap.sh b/ush/fv3gfs_remap.sh
index 7986add331..eaf2c64a01 100755
--- a/ush/fv3gfs_remap.sh
+++ b/ush/fv3gfs_remap.sh
@@ -14,8 +14,6 @@ export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg
pwd=$(pwd)
export DATA=${DATA:-$pwd}
export HOMEgfs=${HOMEgfs:-$PACKAGEROOT}
-export FIXgfs=${FIXgfs:-$HOMEgfs/fix}
-export FIXorog=${FIXorog:-$FIXgfs/orog}
export REMAPEXE=${REMAPEXE:-$HOMEgfs/exec/fregrid_parallel}
export IPD4=${IPD4:-"YES"}
@@ -25,8 +23,8 @@ export CDUMP=${CDUMP:-gfs}
export PREFIX=${PREFIX:-${CDUMP}${TCYC}}
#--------------------------------------------------
-export grid_loc=${FIXorog}/${CASE}/${CASE}_mosaic.nc
-export weight_file=${FIXorog}/${CASE}/remap_weights_${CASE}_${GG}.nc
+export grid_loc=${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc
+export weight_file=${FIXgfs}/orog/${CASE}/remap_weights_${CASE}_${GG}.nc
export APRUN_REMAP=${APRUN_REMAP:-${APRUN:-""}}
export NTHREADS_REMAP=${NTHREADS_REMAP:-${NTHREADS:-1}}
diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh
index 1a0441a06f..79b2cb5b7d 100755
--- a/ush/gaussian_sfcanl.sh
+++ b/ush/gaussian_sfcanl.sh
@@ -27,10 +27,6 @@
# Default is /nwprod2.
# HOMEgfs Directory for gfs version. Default is
# $BASEDIR/gfs_ver.v15.0.0}
-# FIXam Directory for the global fixed climatology files.
-# Defaults to $HOMEgfs/fix/am
-# FIXorog Directory for the model grid and orography netcdf
-# files. Defaults to $HOMEgfs/fix/orog
# FIXWGTS Weight file to use for interpolation
# EXECgfs Directory of the program executable. Defaults to
# $HOMEgfs/exec
@@ -83,9 +79,9 @@
#
# programs : $GAUSFCANLEXE
#
-# fixed data : ${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc
+# fixed data : ${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc
# ${FIXWGTS}
-# ${FIXam}/global_hyblev.l65.txt
+# ${FIXgfs}/am/global_hyblev.l65.txt
#
# input data : ${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc
#
@@ -126,15 +122,13 @@ gfs_ver=${gfs_ver:-v16.3.0}
BASEDIR=${BASEDIR:-${PACKAGEROOT:-/lfs/h1/ops/prod/packages}}
HOMEgfs=${HOMEgfs:-$BASEDIR/gfs.${gfs_ver}}
EXECgfs=${EXECgfs:-$HOMEgfs/exec}
-FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-FIXam=${FIXam:-$HOMEgfs/fix/am}
-FIXWGTS=${FIXWGTS:-$FIXorog/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc}
+FIXWGTS=${FIXWGTS:-${FIXgfs}/orog/${CASE}/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc}
DATA=${DATA:-$(pwd)}
# Filenames.
XC=${XC:-}
GAUSFCANLEXE=${GAUSFCANLEXE:-$EXECgfs/gaussian_sfcanl.x}
-SIGLEVEL=${SIGLEVEL:-$FIXam/global_hyblev.l${LEVSP1}.txt}
+SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVSP1}.txt}
CDATE=${CDATE:?}
@@ -187,12 +181,12 @@ ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile5.nc" "./anal.til
${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile6.nc" "./anal.tile6.nc"
# input orography tiles
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile1.nc" "./orog.tile1.nc"
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile2.nc" "./orog.tile2.nc"
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile3.nc" "./orog.tile3.nc"
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile4.nc" "./orog.tile4.nc"
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile5.nc" "./orog.tile5.nc"
-${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile6.nc" "./orog.tile6.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile1.nc" "./orog.tile1.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile2.nc" "./orog.tile2.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile3.nc" "./orog.tile3.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile4.nc" "./orog.tile4.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile5.nc" "./orog.tile5.nc"
+${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile6.nc" "./orog.tile6.nc"
${NLN} "${SIGLEVEL}" "./vcoord.txt"
diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh
index 99c5c68fa3..e07b3eb42c 100755
--- a/ush/gfs_sndp.sh
+++ b/ush/gfs_sndp.sh
@@ -16,7 +16,7 @@ source "$HOMEgfs/ush/preamble.sh"
export m=$1
mkdir $DATA/$m
cd $DATA/$m
- cp $FIXbufrsnd/gfs_collective${m}.list $DATA/$m/.
+ cp ${HOMEgfs}/fix/product/gfs_collective${m}.list $DATA/$m/.
CCCC=KWBC
file_list=gfs_collective${m}.list
diff --git a/ush/gfs_truncate_enkf.sh b/ush/gfs_truncate_enkf.sh
index 0a7d6fc0dd..8aa47e26e2 100755
--- a/ush/gfs_truncate_enkf.sh
+++ b/ush/gfs_truncate_enkf.sh
@@ -14,17 +14,16 @@ mkdir -p $DATATMP
cd $DATATMP
export LEVS=${LEVS_LORES:-64}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
export CHGRESSH=${CHGRESSH:-${USHgfs}/global_chgres.sh}
export CHGRESEXEC=${CHGRESEXEC-${EXECgfs}/global_chgres}
-export OROGRAPHY=${OROGRAPHY_LORES:-$FIXam/global_orography.t$JCAP.$LONB.$LATB.grb}
-export OROGRAPHY_UF=${OROGRAPHY_UF_LORES:-$FIXam/global_orography_uf.t$JCAP.$LONB.$LATB.grb}
-export LONSPERLAT=${LONSPERLAT_LORES:-$FIXam/global_lonsperlat.t${JCAP}.$LONB.$LATB.txt}
-export SLMASK=${SLMASK_LORES:-$FIXam/global_slmask.t$JCAP.$LONB.$LATB.grb}
-export MTNVAR=${MTNVAR_LORES:-$FIXam/global_mtnvar.t$JCAP.$LONB.$LATB.f77}
-export SIGLEVEL=${SIGLEVEL_LORES:-$FIXam/global_hyblev.l${LEVS}.txt}
-export O3CLIM=${O3CLIM:-$FIXam/global_o3clim.txt}
+export OROGRAPHY=${OROGRAPHY_LORES:-${FIXgfs}/am/global_orography.t$JCAP.$LONB.$LATB.grb}
+export OROGRAPHY_UF=${OROGRAPHY_UF_LORES:-${FIXgfs}/am/global_orography_uf.t$JCAP.$LONB.$LATB.grb}
+export LONSPERLAT=${LONSPERLAT_LORES:-${FIXgfs}/am/global_lonsperlat.t${JCAP}.$LONB.$LATB.txt}
+export SLMASK=${SLMASK_LORES:-${FIXgfs}/am/global_slmask.t$JCAP.$LONB.$LATB.grb}
+export MTNVAR=${MTNVAR_LORES:-${FIXgfs}/am/global_mtnvar.t$JCAP.$LONB.$LATB.f77}
+export SIGLEVEL=${SIGLEVEL_LORES:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
+export O3CLIM=${O3CLIM:-${FIXgfs}/am/global_o3clim.txt}
use_ufo=.true.
diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh
index f1beb9469d..903c2d63fb 100755
--- a/ush/hpssarch_gen.sh
+++ b/ush/hpssarch_gen.sh
@@ -251,48 +251,64 @@ if [[ ${type} = "gfs" ]]; then
} >> "${DATA}/gfswave.txt"
fi
- if [[ ${DO_OCN} = "YES" ]]; then
+ if [[ "${DO_OCN}" == "YES" ]]; then
- head="gfs.t${cyc}z."
+ head="gfs.ocean.t${cyc}z."
+ rm -f "${DATA}/ocean_6hravg.txt"; touch "${DATA}/ocean_6hravg.txt"
+ rm -f "${DATA}/ocean_daily.txt"; touch "${DATA}/ocean_daily.txt"
+ rm -f "${DATA}/ocean_grib2.txt"; touch "${DATA}/ocean_grib2.txt"
- rm -f "${DATA}/gfs_flux_1p00.txt"
- rm -f "${DATA}/ocn_ice_grib2_0p5.txt"
- rm -f "${DATA}/ocn_ice_grib2_0p25.txt"
- rm -f "${DATA}/ocn_2D.txt"
- rm -f "${DATA}/ocn_3D.txt"
- rm -f "${DATA}/ocn_xsect.txt"
- rm -f "${DATA}/ocn_daily.txt"
- touch "${DATA}/gfs_flux_1p00.txt"
- touch "${DATA}/ocn_ice_grib2_0p5.txt"
- touch "${DATA}/ocn_ice_grib2_0p25.txt"
- touch "${DATA}/ocn_2D.txt"
- touch "${DATA}/ocn_3D.txt"
- touch "${DATA}/ocn_xsect.txt"
- touch "${DATA}/ocn_daily.txt"
- echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> "${DATA}/ocn_2D.txt"
- echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> "${DATA}/ocn_2D.txt"
- echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> "${DATA}/ocn_3D.txt"
- echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> "${DATA}/ocn_xsect.txt"
- echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> "${DATA}/ocn_daily.txt"
- echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> "${DATA}/ocn_ice_grib2_0p5.txt"
- echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> "${DATA}/ocn_ice_grib2_0p25.txt"
+ echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc" >> "${DATA}/ocean_6hravg.txt"
+ echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}daily.f*.nc" >> "${DATA}/ocean_daily.txt"
+
+ {
+ if [[ -d "${COM_OCEAN_GRIB}/5p00" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_OCEAN_GRIB}/1p00" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_OCEAN_GRIB}/0p25" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx"
+ fi
+ } >> "${DATA}/ocean_grib2.txt"
# Also save fluxes from atmosphere
+ head="gfs.t${cyc}z."
+ rm -f "${DATA}/gfs_flux_1p00.txt"; touch "${DATA}/gfs_flux_1p00.txt"
{
echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???"
echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx"
} >> "${DATA}/gfs_flux_1p00.txt"
fi
- if [[ ${DO_ICE} = "YES" ]]; then
- head="gfs.t${cyc}z."
+ if [[ "${DO_ICE}" == "YES" ]]; then
+ head="gfs.ice.t${cyc}z."
+ rm -f "${DATA}/ice_6hravg.txt"; touch "${DATA}/ice_6hravg.txt"
+ rm -f "${DATA}/ice_grib2.txt"; touch "${DATA}/ice_grib2.txt"
- rm -f "${DATA}/ice.txt"
- touch "${DATA}/ice.txt"
{
- echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in"
- echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc"
- } >> "${DATA}/ice.txt"
+ echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}ic.nc"
+ echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc"
+ } >> "${DATA}/ice_6hravg.txt"
+
+ {
+ if [[ -d "${COM_ICE_GRIB}/5p00" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_ICE_GRIB}/1p00" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_ICE_GRIB}/0p25" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx"
+ fi
+ } >> "${DATA}/ice_grib2.txt"
fi
if [[ ${DO_AERO} = "YES" ]]; then
@@ -766,4 +782,3 @@ fi ##end of enkfgdas or enkfgfs
#-----------------------------------------------------
exit 0
-
diff --git a/ush/icepost.ncl b/ush/icepost.ncl
deleted file mode 100755
index ad102971c4..0000000000
--- a/ush/icepost.ncl
+++ /dev/null
@@ -1,382 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap CICE5 output on the tripole grid to
-; a set of rectilinear grids using pre-computed ESMF weights to remap
-; the listed fields to the destination grid and write the results
-; to a new netCDF file
-;
-; See ocnpost.ncl for a complete description
-;
-; Bin.Li@noaa.gov (May 2019)
-; This script is revised to be used in the coupled workflow.
-; Revised parts are marked by
-
- load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-
-;************************************************
-; specify parameters
-;************************************************
-;
-
- output_masks = False
- ; destination grid sizes and name
- dsttype = (/"rect."/)
- ;dstgrds = (/"1p0", "0p5", "0p25"/)
-;
-
- ; specify a location to use
- ; nemsrc = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
- ; interpolation methods
- methods = (/"bilinear" ,"conserve"/)
- ; ocean model output location
- ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ice/"
-
-
- ; variables to be regridded with the native tripole stagger location
-
- varlist = (/ (/ "hi_h", "Ct", "bilinear"/) \
- ,(/ "hs_h", "Ct", "bilinear"/) \
- ,(/ "Tsfc_h", "Ct", "bilinear"/) \
- ,(/ "aice_h", "Ct", "bilinear"/) \
- ,(/ "sst_h", "Ct", "bilinear"/) \
- /)
- dims = dimsizes(varlist)
- nvars = dims(0)
- delete(dims)
- ;print(varlist)
-
- ; vectors to be regridded with the native tripole stagger location
- ; and dimensionality
- ; note: vectors are always unstaggered using bilinear weights, but can
- ; be remapped using conservative
- nvpairs = 1
- veclist = new( (/nvpairs,3,2/),"string")
- veclist = (/ (/ (/"uvel_h", "vvel_h"/), (/"Bu", "Bu"/), (/"bilinear", "bilinear"/) /) \
- /)
- ;print(veclist)
-
- begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run
-;----------------------------------------------------------------------
-; idate = "20120101"
-; icefilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ice*.nc")
-; icef = addfiles(icefilelist,"r")
-; nfiles = dimsizes(icefilelist)
-;
-
- ; get the rotation angle
- angleT = icef[0]->ANGLET
-
- ; get a 2 dimensional fields for creating the interpolation mask
- ; the mask2d contain 1's on land and 0's at valid points.
- mask2d = where(ismissing(icef[0]->sst_h), 1.0, 0.0)
- ;printVarSummary(mask2d)
-
- ; create conformed rotation arrays to make vector rotations cleaner
- angleT2d=conform_dims(dimsizes(mask2d),angleT,(/1,2/))
-
-;----------------------------------------------------------------------
-; loop over the output resolutions
-;----------------------------------------------------------------------
-
- jj = 1
- ii = 0
-
- do jj = 0,dimsizes(dstgrds)-1
- ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
- outres = dstgrds(jj)+"x"+dstgrds(jj)
- outgrid = dstgrds(jj)
-
- ; regrid a field to obtain the output xy dimensions
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- tt = ESMF_regrid_with_weights(angleT,wgtsfile,False)
- tt!0 = "lat"
- tt!1 = "lon"
- lat = tt&lat
- lon = tt&lon
- dims = dimsizes(tt)
- nlat = dims(0)
- nlon = dims(1)
- print("fields will be remapped to destination grid size "\
- +nlon+" "+nlat)
-
- delete(tt)
- delete(dims)
-
- ; regrid the masks to obtain the interpolation masks.
- ; the mask2d contain 1's on land and 0's at valid points.
- ; when remapped, any mask value > 0 identifies land values that
- ; have crept into the field. remapped model fields are then
- ; masked with this interpolation mask
-
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
-
- if(output_masks)then
- testfile = "masks_"+dstgrds(jj)+".nc"
- system("/bin/rm -f "+testfile)
- ; create
- testcdf = addfile(testfile,"c")
- testcdf->rgmask2d = rgmask2d
- ; close
- delete(testcdf)
- end if
-
- ; create the interpolation mask
- rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
-
-;----------------------------------------------------------------------
-; loop over each file in the icefilelist
-;----------------------------------------------------------------------
-;
- ; retrieve the time stamp
- time = icef[0]->time
- delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-; system("/bin/rm -f " + outfile) ; remove if exists
-; outcdf = addfile (outfile, "c") ; open output file
-;
-;
-
- ; explicitly declare file definition mode. Improve efficiency.
- setfileoption(outcdf,"DefineMode",True)
-
- ; create global attributes of the file
- fAtt = True ; assign file attributes
- fAtt@creation_date = systemfunc ("date")
- fAtt@source_file = infile
- fileattdef( outcdf, fAtt ) ; copy file attributes
-
- ; predefine the coordinate variables and their dimensionality
- dimNames = (/"time", "lat", "lon"/)
- dimSizes = (/ -1 , nlat, nlon/)
- dimUnlim = (/ True , False, False/)
- filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
- ; predefine the the dimensionality of the variables to be written out
- filevardef(outcdf, "time", typeof(time), getvardims(time))
- filevardef(outcdf, "lat", typeof(lat), getvardims(lat))
- filevardef(outcdf, "lon", typeof(lon), getvardims(lon))
-
- ; Copy attributes associated with each variable to the file
- filevarattdef(outcdf, "time", time)
- filevarattdef(outcdf, "lat", lat)
- filevarattdef(outcdf, "lon", lon)
-
- ; predefine variables
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- odims = (/"time", "lat", "lon"/)
- ;print("creating variable "+varname+" in file")
- filevardef(outcdf, varname, "float", odims)
- delete(odims)
- end do
-
- do nv = 0,nvpairs-1
- do nn = 0,1
- vecname = veclist(nv,0,nn)
- odims = (/"time", "lat", "lon"/)
- ;print("creating variable "+vecname+" in file")
- filevardef(outcdf, vecname, "float", odims)
- delete(odims)
- end do
- end do
-
- ; explicitly exit file definition mode.
- setfileoption(outcdf,"DefineMode",False)
-
- lat=lat(::-1)
- ; write the dimensions to the file
- outcdf->time = (/time/)
- outcdf->lat = (/lat/)
- outcdf->lon = (/lon/)
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
- ;nv = 1
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vargrid = varlist(nv,1)
- varmeth = varlist(nv,2)
-
- ;print(nv+" "+varname+" "+vargrid+" "+varmeth)
- icevar = icef[ii]->$varname$
- ndims = dimsizes(dimsizes(icevar))
- ;print(ndims+" "+dimsizes(icevar))
-
- if(vargrid .ne. "Ct")then
- ; print error if the variable is not on the Ct grid
- print("Variable is not on Ct grid")
- exit
- end if
-
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
- rgtt = ESMF_regrid_with_weights(icevar,wgtsfile,False)
- rgtt = where(ismissing(rgmask2d),icevar@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, varname, rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
-
- outcdf->$varname$ = (/rgtt/)
-
- delete(icevar)
- delete(rgtt)
-
- ; nv, loop over number of variables
- end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-
- ;nv = 0
- do nv = 0,nvpairs-1
- vecnames = veclist(nv,0,:)
- vecgrids = veclist(nv,1,:)
- vecmeth = veclist(nv,2,:)
- ;print(nv+" "+vecnames+" "+vecgrids+" "+vecmeth)
-
- ; create a vector pair list
- vecpairs = NewList("fifo")
- n = 0
- uvel = icef[ii]->$vecnames(n)$
- vecfld = where(ismissing(uvel),0.0,uvel)
- copy_VarAtts(uvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(ut@remap)
-
- n = 1
- vvel = icef[ii]->$vecnames(n)$
- vecfld = where(ismissing(vvel),0.0,vvel)
- copy_VarAtts(vvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(vt@remap)
-
- ListAppend(vecpairs,ut)
- ListAppend(vecpairs,vt)
- ;print(vecpairs)
-
- ; rotate
- ; first copy Metadata
- urot = vecpairs[0]
- vrot = vecpairs[1]
- urot = cos(angleT2d)*ut - sin(angleT2d)*vt
- vrot = sin(angleT2d)*ut + cos(angleT2d)*vt
-
- ; change attribute to indicate these are now rotated velocities
- urot@long_name=str_sub_str(urot@long_name,"(x)","zonal")
- vrot@long_name=str_sub_str(vrot@long_name,"(y)","meridional")
- ; copy back
- vecpairs[0] = urot
- vecpairs[1] = vrot
- delete([/urot, vrot/])
-
- ; remap
- do n = 0,1
- vecfld = vecpairs[n]
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
- rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, vecnames(n), rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$vecnames(n)$ = (/rgtt/)
- delete(rgtt)
- end do
- delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
- delete([/vecnames,vecgrids,vecmeth/])
- ; nv, loop over number of vector pairs
- end do
-
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-
- delete(outcdf)
-
- ; ii, loop over files
- ;end do
- ;jj, loop over destination grids
- delete([/lat,lon,nlon,nlat/])
- delete([/rgmask2d/])
- end do
- print("One complete ice file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh
index 48885c62e4..ae0e381db4 100755
--- a/ush/load_fv3gfs_modules.sh
+++ b/ush/load_fv3gfs_modules.sh
@@ -10,6 +10,7 @@ fi
ulimit_s=$( ulimit -S -s )
# Find module command and purge:
+source "${HOMEgfs}/ush/detect_machine.sh"
source "${HOMEgfs}/ush/module-setup.sh"
# Source versions file for runtime
@@ -18,36 +19,14 @@ source "${HOMEgfs}/versions/run.ver"
# Load our modules:
module use "${HOMEgfs}/modulefiles"
-if [[ -d /lfs/f1 ]]; then
- # We are on WCOSS2 (Cactus or Dogwood)
- module load module_base.wcoss2
-elif [[ -d /mnt/lfs1 ]] ; then
- # We are on NOAA Jet
- module load module_base.jet
-elif [[ -d /scratch1 ]] ; then
- # We are on NOAA Hera
- module load module_base.hera
-elif [[ -d /work ]] ; then
- # We are on MSU Orion or Hercules
- if [[ -d /apps/other ]] ; then
- # Hercules
- module load module_base.hercules
- else
- # Orion
- module load module_base.orion
- fi
-elif [[ -d /glade ]] ; then
- # We are on NCAR Yellowstone
- module load module_base.cheyenne
-elif [[ -d /lustre && -d /ncrc ]] ; then
- # We are on GAEA.
- module load module_base.gaea
-elif [[ -d /data/prod ]] ; then
- # We are on SSEC S4
- module load module_base.s4
-else
- echo WARNING: UNKNOWN PLATFORM
-fi
+case "${MACHINE_ID}" in
+ "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4")
+ module load "module_base.${MACHINE_ID}"
+ ;;
+ *)
+ echo "WARNING: UNKNOWN PLATFORM"
+ ;;
+esac
module list
diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh
index f15ae5666c..e8e72b8fbe 100755
--- a/ush/load_ufsda_modules.sh
+++ b/ush/load_ufsda_modules.sh
@@ -27,48 +27,26 @@ fi
ulimit_s=$( ulimit -S -s )
# Find module command and purge:
+source "${HOMEgfs}/ush/detect_machine.sh"
source "${HOMEgfs}/ush/module-setup.sh"
# Load our modules:
module use "${HOMEgfs}/sorc/gdas.cd/modulefiles"
-if [[ -d /lfs/f1 ]]; then
- # We are on WCOSS2 (Cactus or Dogwood)
- echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /lfs3 ]] ; then
- # We are on NOAA Jet
- echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /scratch1 ]] ; then
- # We are on NOAA Hera
- module load "${MODS}/hera"
- # set NETCDF variable based on ncdump location
- NETCDF=$( which ncdump )
- export NETCDF
-elif [[ -d /work ]] ; then
- # We are on MSU Orion or Hercules
- if [[ -d /apps/other ]] ; then
- # Hercules
- module load "${MODS}/hercules"
- else
- # Orion
- module load "${MODS}/orion"
- fi
- # set NETCDF variable based on ncdump location
- ncdump=$( which ncdump )
- NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
- export NETCDF
-elif [[ -d /glade ]] ; then
- # We are on NCAR Yellowstone
- echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /lustre && -d /ncrc ]] ; then
- # We are on GAEA.
- echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /data/prod ]] ; then
- # We are on SSEC S4
- echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-else
- echo WARNING: UNKNOWN PLATFORM
-fi
+case "${MACHINE_ID}" in
+ ("hera" | "orion" | "hercules")
+ module load "${MODS}/${MACHINE_ID}"
+ ncdump=$( command -v ncdump )
+ NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
+ export NETCDF
+ ;;
+ ("wcoss2" | "acorn" | "jet" | "gaea" | "s4")
+ echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
+ ;;
+ *)
+ echo "WARNING: UNKNOWN PLATFORM"
+ ;;
+esac
module list
pip list
diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl
index 502032da80..c56ac3bdad 100755
--- a/ush/minmon_xtrct_costs.pl
+++ b/ush/minmon_xtrct_costs.pl
@@ -22,8 +22,8 @@
#
#---------------------------
-if ($#ARGV != 4 ) {
- print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+ print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile\n";
exit;
}
my $suffix = $ARGV[0];
@@ -31,7 +31,6 @@
my $pdy = $ARGV[1];
my $cyc = $ARGV[2];
my $infile = $ARGV[3];
-my $jlogfile = $ARGV[4];
my $use_costterms = 0;
my $no_data = 0.00;
diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl
index 0125c58ac8..ac83c08cd3 100755
--- a/ush/minmon_xtrct_gnorms.pl
+++ b/ush/minmon_xtrct_gnorms.pl
@@ -185,8 +185,8 @@ sub updateGnormData {
#
#---------------------------------------------------------------------------
-if ($#ARGV != 4 ) {
- print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+ print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile \n";
exit;
}
@@ -195,7 +195,6 @@ sub updateGnormData {
my $pdy = $ARGV[1];
my $cyc = $ARGV[2];
my $infile = $ARGV[3];
-my $jlogfile = $ARGV[4];
my $scr = "minmon_xtrct_gnorms.pl";
diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl
index 1b8186b6ad..cc5da86af8 100755
--- a/ush/minmon_xtrct_reduct.pl
+++ b/ush/minmon_xtrct_reduct.pl
@@ -9,20 +9,18 @@
# reduction.ieee_d files ready for GrADS use.
#---------------------------------------------------------------------------
-if ($#ARGV != 4 ) {
- print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+ print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile\n";
print " suffix is data source identifier\n";
print " pdy is YYYYMMDD of the cycle to be processed\n";
print " cyc is HH of the cycle to be processed\n";
print " infile is the data file containing the reduction stats\n";
- print " jlogfile is the job log file\n";
exit;
}
my $suffix = $ARGV[0];
my $pdy = $ARGV[1];
my $cyc = $ARGV[2];
my $infile = $ARGV[3];
-my $jlogfile = $ARGV[4];
my $scr = "minmon_xtrct_reduct.pl";
print "$scr has started\n";
diff --git a/ush/oceanice_nc2grib2.sh b/ush/oceanice_nc2grib2.sh
new file mode 100755
index 0000000000..1d0e5ae274
--- /dev/null
+++ b/ush/oceanice_nc2grib2.sh
@@ -0,0 +1,319 @@
+#!/bin/bash
+
+# This script contains functions to convert ocean/ice rectilinear netCDF files to grib2 format
+# This script uses the wgrib2 utility to convert the netCDF files to grib2 format and then indexes it
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+################################################################################
+function _ice_nc2grib2 {
+# This function converts the ice rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local current_cycle=${3} # YYYYMMDDHH
+ local aperiod=${4} # 0-6
+ local infile=${5} # ice.0p25.nc
+ local outfile=${6} # ice.0p25.grib2
+ local template=${7} # template.global.0p25.gb2
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "hi_h" "0:1:${latlon_dims}" \
+ -set_var ICETK -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "aice_h" "0:1:${latlon_dims}" \
+ -set_var ICEC -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "Tsfc_h" "0:1:${latlon_dims}" \
+ -set_var ICETMP -set center 7 -rpn "273.15:+" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "uvel_h" "0:1:${latlon_dims}" \
+ -set_var UICE -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "vvel_h" "0:1:${latlon_dims}" \
+ -set_var VICE -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+# Additional variables needed for GFSv17/GEFSv13 operational forecast
+# files, but GRIB2 parameters not available in NCEP (-set center 7)
+# tables in wgrib2 v2.0.8:
+
+# -import_netcdf "${infile}" "hs_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "frzmlt_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "albsni_h" "0:1:${latlon_dims}" \
+# -set_var ALBICE -set center 7 -rpn "100.0:/" \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "mlt_onset_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "frz_onset_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+ fi
+ return "${rc}"
+
+}
+
+################################################################################
+function _ocean2D_nc2grib2 {
+# This function converts the ocean 2D rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local current_cycle=${3} # YYYYMMDDHH
+ local aperiod=${4} # 0-6
+ local infile=${5} # ocean.0p25.nc
+ local outfile=${6} # ocean_2D.0p25.grib2
+ local template=${7} # template.global.0p25.gb2
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "SSH" "0:1:${latlon_dims}" \
+ -set_var SSHG -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SST" "0:1:${latlon_dims}" \
+ -set_var WTMP -set center 7 -rpn "273.15:+" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSS" "0:1:${latlon_dims}" \
+ -set_var SALIN -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "speed" "0:1:${latlon_dims}" \
+ -set_var SPC -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSU" "0:1:${latlon_dims}" \
+ -set_var UOGRD -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSV" "0:1:${latlon_dims}" \
+ -set_var VOGRD -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "latent" "0:1:${latlon_dims}" \
+ -set_var LHTFL -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "sensible" "0:1:${latlon_dims}" \
+ -set_var SHTFL -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SW" "0:1:${latlon_dims}" \
+ -set_var DSWRF -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "LW" "0:1:${latlon_dims}" \
+ -set_var DLWRF -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "LwLatSens" "0:1:${latlon_dims}" \
+ -set_var THFLX -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "MLD_003" "0:1:${latlon_dims}" \
+ -set_var WDEPTH -set center 7 -set_lev "mixed layer depth" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+# Additional variables needed for GFSv17/GEFSv13 operational forecast
+# files, but GRIB2 parameters not available in NCEP (-set center 7)
+# tables in wgrib2 v2.0.8:
+#
+# -import_netcdf "${infile}" "Heat_PmE" "0:1:${latlon_dims}" \
+# -set_var DWHFLUX -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "taux" "0:1:${latlon_dims}" \
+# -set_var XCOMPSS -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "tauy" "0:1:${latlon_dims}" \
+# -set_var YCOMPSS -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format"
+ fi
+ return "${rc}"
+
+}
+
+################################################################################
+function _ocean3D_nc2grib2 {
+# This function converts the ocean 3D rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local levels=${3} # 5:15:25:35:45:55:65:75:85:95:105:115:125
+ local current_cycle=${4} # YYYYMMDDHH
+ local aperiod=${5} # 0-6
+ local infile=${6} # ocean.0p25.nc
+ local outfile=${7} # ocean_3D.0p25.grib2
+ local template=${8} # template.global.0p25.gb2
+
+ IFS=':' read -ra depths <<< "${levels}"
+
+ zl=0
+ for depth in "${depths[@]}"; do
+
+ [[ -f "tmp.gb2" ]] && rm -f "tmp.gb2"
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "temp" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var WTMP -set center 7 -rpn "273.15:+" \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "so" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var SALIN -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "uo" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var UOGRD -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "vo" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var VOGRD -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format at depth ${depth}m, ABORT!"
+ return "${rc}"
+ fi
+
+ cat tmp.gb2 >> "${outfile}"
+ rm -f tmp.gb2
+ ((zl = zl + 1))
+
+ done
+
+ # Notes:
+ # WATPTEMP (water potential temperature (theta)) may be a better
+ # GRIB2 parameter than WTMP (water temperature) if MOM6 outputs
+ # potential temperature. WATPTEMP is not available in NCEP
+ # (-set center 7) tables in wgrib2 v2.0.8.
+
+ return "${rc}"
+
+}
+
+################################################################################
+# Input arguments
+component=${1:?"Need a valid component; options: ice|ocean"}
+grid=${2:-"0p25"} # Default to 0.25-degree grid
+current_cycle=${3:-"2013100100"} # Default to 2013100100
+avg_period=${4:-"0-6"} # Default to 6-hourly average
+ocean_levels=${5:-"5:15:25:35:45:55:65:75:85:95:105:115:125"} # Default to 12-levels
+
+case "${grid}" in
+ "0p25")
+ latlon_dims="0:721:0:1440"
+ ;;
+ "0p50")
+ latlon_dims="0:361:0:720"
+ ;;
+ "1p00")
+ latlon_dims="0:181:0:360"
+ ;;
+ "5p00")
+ latlon_dims="0:36:0:72"
+ ;;
+ *)
+ echo "FATAL ERROR: Unsupported grid '${grid}', ABORT!"
+ exit 1
+ ;;
+esac
+
+input_file="${component}.${grid}.nc"
+template="template.global.${grid}.gb2"
+
+# Check if the template file exists
+if [[ ! -f "${template}" ]]; then
+ echo "FATAL ERROR: '${template}' does not exist, ABORT!"
+ exit 127
+fi
+
+# Check if the input file exists
+if [[ ! -f "${input_file}" ]]; then
+ echo "FATAL ERROR: '${input_file}' does not exist, ABORT!"
+ exit 127
+fi
+
+case "${component}" in
+ "ice")
+ rm -f "${component}.${grid}.grib2" || true
+ _ice_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ ;;
+ "ocean")
+ rm -f "${component}_2D.${grid}.grib2" || true
+ _ocean2D_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_2D.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean 2D rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ rm -f "${component}_3D.${grid}.grib2" || true
+ _ocean3D_nc2grib2 "${grid}" "${latlon_dims}" "${ocean_levels}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_3D.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean 3D rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ # Combine the 2D and 3D grib2 files into a single file
+ rm -f "${component}.${grid}.grib2" || true
+ cat "${component}_2D.${grid}.grib2" "${component}_3D.${grid}.grib2" > "${component}.${grid}.grib2"
+
+ ;;
+ *)
+ echo "FATAL ERROR: Unknown component: '${component}'. ABORT!"
+ exit 3
+ ;;
+esac
+
+# Index the output grib2 file
+${WGRIB2} -s "${component}.${grid}.grib2" > "${component}.${grid}.grib2.idx"
+rc=$?
+# Check if the indexing was successful
+if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to index the file '${component}.${grid}.grib2'"
+ exit "${rc}"
+fi
+
+exit 0
diff --git a/ush/ocnpost.ncl b/ush/ocnpost.ncl
deleted file mode 100755
index 27e60b0edf..0000000000
--- a/ush/ocnpost.ncl
+++ /dev/null
@@ -1,588 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap MOM6 ocean output on the tripole grid to
-; a set of rectilinear grids using pre-computed ESMF weights to remap
-; the listed fields to the destination grid and write the results
-; to a new netCDF file
-;
-; Prior to running this script, files containing the conservative
-; and bilinear regridding weights must be generated. These weights
-; are created using the generate_iceocnpost_weights.ncl script.
-;
-; Note: the descriptive text below assumes fortran type indexing
-; where the variables are indexed as (i,j) and indices start at 1
-; NCL indices are (j,i) and start at 0
-;
-; The post involves these steps
-;
-; a) unstaggering velocity points
-; MOM6 is on an Arakawa C grid. MOM6 refers to these
-; locations as "Ct" for the centers and "Cu", "Cv"
-; "Bu" for the left-right, north-south and corner
-; points, respectively.
-;
-; The indexing scheme in MOM6 is as follows:
-;
-; Cv@i,j
-; ----X------X Bu@i,j
-; |
-; |
-; Ct@i,j |
-; X X Cu@i,j
-; |
-; |
-; |
-;
-; CICE5 is on an Arakawa B grid. CICE5 refers to these
-; locations as TLAT,TLON for the centers and ULAT,ULON
-; for the corners
-;
-; In UFS, the CICE5 grid has been created using the MOM6
-; supergrid file. Therefore, all grid points are consistent
-; between the two models.
-;
-; In the following, MOM6's nomenclature will be followed,
-; so that CICE5's U-grid will be referred to as "Bu".
-;
-; b) rotation of tripole vectors to East-West
-; MOM6 and CICE6 both output velocties on their native
-; velocity points. For MOM6, that is u-velocities on the
-; Cu grid and v-velocites on the Cv grid. For CICE5, it is
-; both u and v-velocities on the Bu grid.
-;
-; The rotation angle for both models are defined at center
-; grid points; therefore the velocities need to be first
-; unstaggered before rotation. MOM6 and CICE5 also define
-; opposite directions for the rotations. Finally, while the
-; grid points are identical between the two models, CICE5
-; calculates the rotation angle at center grid points by
-; averaging the four surrounding B grid points. MOM6 derives
-; the rotation angle at the center directly from the latitude
-; and longitude of the center grid points. The angles are therefor
-; not identical between the two grids.
-;
-; c) conservative regridding of some fields
-; Fields such as ice concentration or fluxes which inherently
-; area area-weighted require conservative regridding. Most other
-; variables are state variables and can be regridded using
-; bilinear weighting.
-;
-; An efficient way to accomplish the unstaggering of velocities
-; is to use the bilinear interpolation weights between grid
-; points of the Arakawa C grid and the center grid points (for example
-; Cu->Ct). These weights are generated by the weight generation script
-;
-; Remapping from the tripole to rectilinear uses either the bilinear
-; or conservative weights from the weight generation script. Bilinear weights
-; generated for the first vertical level can be used on other levels
-; (where the masking changes) by utilizing the correct masking procedure.
-; Set output_masks to true to examine the interpolation masks.
-;
-; Intermediate file output can easily be generated for debugging by
-; follwing the example in the output_masks logical
-;
-; Bin.Li@noaa.gov (May 2019)
-; The scripts is revised for use in the coupled workflow.
-;
- load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-;
-
- ; warnings (generated by int2p_n_Wrap) can be supressed by
- ; the following (comment out to get the warnings)
- err = NhlGetErrorObjectId()
- setvalues err
-; "errLevel" : "Fatal" ; only report Fatal errors
- "errLevel" : "Verbose"
- end setvalues
-
- output_masks = False
-
- ; specify a location to use
- ; nemsrc = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
- ; interpolation methods
- methods = (/"bilinear" ,"conserve"/)
- ; ocean model output location
- ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ocn/"
-
- ; destination grid sizes and name
- dsttype = (/"rect."/)
- ;dstgrds = (/"1p0", "0p5", "0p25"/)
- ;dstgrds = (/"0p5"/)
- dstgrds = (/"0p25"/)
-
- ; variables to be regridded with the native tripole stagger location
- ; and dimensionality
- ; first BM contained only field "mld", which was actually ePBL
- ; the remaining BMs contain ePBL, MLD_003 and MLD_0125
- ; the following NCO command will be issued at the end
- ; to rename the variable mld to ePBL if the variable mld is found
- ; ncocmd = "ncrename -O -v mld,ePBL "
- ncocmd = "ncrename -O -v MLD_003,mld"
-
- varlist = (/ (/ "SSH", "Ct", "bilinear", "2"/) \
- ,(/ "SST", "Ct", "bilinear", "2"/) \
- ,(/ "SSS", "Ct", "bilinear", "2"/) \
- ,(/ "speed", "Ct", "bilinear", "2"/) \
- ,(/ "temp", "Ct", "bilinear", "3"/) \
- ,(/ "so", "Ct", "bilinear", "3"/) \
- ,(/ "latent", "Ct", "conserve", "2"/) \
- ,(/ "sensible", "Ct", "conserve", "2"/) \
- ,(/ "SW", "Ct", "conserve", "2"/) \
- ,(/ "LW", "Ct", "conserve", "2"/) \
- ,(/ "evap", "Ct", "conserve", "2"/) \
- ,(/ "lprec", "Ct", "conserve", "2"/) \
- ,(/ "fprec", "Ct", "conserve", "2"/) \
- ,(/"LwLatSens", "Ct", "conserve", "2"/) \
- ,(/ "Heat_PmE", "Ct", "conserve", "2"/) \
-; ,(/ "mld", "Ct", "bilinear", "2"/) \
- ,(/ "ePBL", "Ct", "bilinear", "2"/) \
- ,(/ "MLD_003", "Ct", "bilinear", "2"/) \
- ,(/ "MLD_0125", "Ct", "bilinear", "2"/) \
- /)
- dims = dimsizes(varlist)
- nvars = dims(0)
- delete(dims)
- ;print(varlist)
-
- ; vectors to be regridded with the native tripole stagger location
- ; and dimensionality
- ; note: vectors are always unstaggered using bilinear weights, but can
- ; be remapped using conservative
- nvpairs = 3
- veclist = new( (/nvpairs,4,2/),"string")
- veclist = (/ (/ (/ "SSU", "SSV"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"2", "2"/) /) \
- , (/ (/ "uo", "vo"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"3", "3"/) /) \
- , (/ (/ "taux", "tauy"/), (/"Cu", "Cv"/), (/"conserve", "conserve"/), (/"2", "2"/) /) \
- /)
- ;print(veclist)
-
- begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run
-;----------------------------------------------------------------------
-
-; idate = "20120101"
-
-; ocnfilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ocn*.nc")
-; ocnf = addfiles(ocnfilelist,"r")
-; nfiles = dimsizes(ocnfilelist)
-;
-
- ; get the rotation angles and vertical grid from the first file
- ; two different name were used for the angles, either sinrot,cosrot
- ; or sin_rot,cos_rot
- if(isfilevar(ocnf[0],"sin_rot"))then
- sinrot = ocnf[0]->sin_rot
- else
- sinrot = ocnf[0]->sinrot
- end if
- if(isfilevar(ocnf[0],"cos_rot"))then
- cosrot = ocnf[0]->cos_rot
- else
- cosrot = ocnf[0]->cosrot
- end if
- z_l = ocnf[0]->z_l
- z_i = ocnf[0]->z_i
- nlevs = dimsizes(z_l)
-
- ; get a 2 and 3 dimensional fields for creating the interpolation masks
- ; the mask2d,mask3d contain 1's on land and 0's at valid points.
- mask2d = where(ismissing(ocnf[0]->SST), 1.0, 0.0)
- mask3d = where(ismissing(ocnf[0]->temp), 1.0, 0.0)
- ;printVarSummary(mask2d)
- ;printVarSummary(mask3d)
-
- ; create conformed rotation arrays to make vector rotations cleaner
- sinrot2d=conform_dims(dimsizes(mask2d),sinrot,(/1,2/))
- cosrot2d=conform_dims(dimsizes(mask2d),cosrot,(/1,2/))
-
- sinrot3d=conform_dims(dimsizes(mask3d),sinrot,(/2,3/))
- cosrot3d=conform_dims(dimsizes(mask3d),cosrot,(/2,3/))
-
- ; check for variables in file. this is only required because
- ; of the missing/misnamed MLD variables in the first BM
- ; only the varlist is checked, since it is assumed there are
- ; no other variables missing after the first benchmark
- valid = new((/nvars/),"logical")
- valid = False
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- if(isfilevar(ocnf[0],varname))then
- valid(nv) = True
- end if
- print(varlist(nv,0)+" "+valid(nv))
- end do
-
-;----------------------------------------------------------------------
-; loop over the output resolutions
-;----------------------------------------------------------------------
-
- jj = 1
- ii = 0
-
- do jj = 0,dimsizes(dstgrds)-1
- ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
- outres = dstgrds(jj)+"x"+dstgrds(jj)
- outgrid = dstgrds(jj)
-
- ; regrid a field to obtain the output xy dimensions
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- tt = ESMF_regrid_with_weights(sinrot,wgtsfile,False)
- tt!0 = "lat"
- tt!1 = "lon"
- lat = tt&lat
- lon = tt&lon
- dims = dimsizes(tt)
- nlat = dims(0)
- nlon = dims(1)
-
- print("fields will be remapped to destination grid size "\
- +nlon+" "+nlat)
-
- delete(tt)
- delete(dims)
-
- ; regrid the masks to obtain the interpolation masks.
- ; the mask2d,mask3d contain 1's on land and 0's at valid points.
- ; when remapped, any mask value > 0 identifies land values that
- ; have crept into the field. remapped model fields are then
- ; masked with this interpolation mask
-
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
- rgmask3d = ESMF_regrid_with_weights(mask3d, wgtsfile,False)
-
- if(output_masks)then
- testfile = "masks_"+dstgrds(jj)+".nc"
- system("/bin/rm -f "+testfile)
- ; create
- testcdf = addfile(testfile,"c")
- testcdf->rgmask2d = rgmask2d
- testcdf->rgmask3d = rgmask3d
- ; close
- delete(testcdf)
- end if
-
- ; create the interpolation mask
- rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
- rgmask3d = where(rgmask3d .gt. 0.0, rgmask3d@_FillValue, 1.0)
-
- ; conformed depth array
- depth = conform_dims(dimsizes(mask3d), z_l, (/1/))
- ;print(dimsizes(depth))
-
-;----------------------------------------------------------------------
-; loop over each file in the ocnfilelist
-;----------------------------------------------------------------------
-;
-
- ; retrieve the time stamp
- time = ocnf[0]->time
- delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-; system("/bin/rm -f " + outfile) ; remove if exists
-; outcdf = addfile (outfile, "c") ; open output file
-; specify output file information and open file for output
- FILENAME_REGRID = DATA_TMP+"/ocnr"+VDATE+"."+ENSMEM+"."+IDATE+"_"+outres+"_MOM6.nc"
- if (isfilepresent(FILENAME_REGRID)) then
- system("rm -f "+FILENAME_REGRID)
- end if
- outcdf = addfile(FILENAME_REGRID,"c")
- outfile=FILENAME_REGRID
-
- ; explicitly declare file definition mode. Improve efficiency.
- setfileoption(outcdf,"DefineMode",True)
-
- ; create global attributes of the file
- fAtt = True ; assign file attributes
- fAtt@creation_date = systemfunc ("date")
- fAtt@source_file = infile
- fileattdef( outcdf, fAtt ) ; copy file attributes
-
- ; predefine the coordinate variables and their dimensionality
- ; dimNames = (/"time", "z_l", "z_i", "z_T", "lat", "lon"/)
- dimNames = (/"time", "z_l", "z_i", "lat", "lon"/)
- ;dimSizes = (/ -1 , nlevs, nlevs+1, nTd, nlat, nlon/)
- dimSizes = (/ -1 , nlevs, nlevs+1, nlat, nlon/)
- ;dimUnlim = (/ True , False, False, False, False, False/)
- dimUnlim = (/ True , False, False, False, False/)
- filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
- ; predefine the the dimensionality of the variables to be written out
- filevardef(outcdf, "time", typeof(time), getvardims(time))
- filevardef(outcdf, "z_l", typeof(z_l), getvardims(z_l))
- filevardef(outcdf, "z_i", typeof(z_i), getvardims(z_i))
- ;filevardef(outcdf, "z_T", typeof(z_T), getvardims(z_T))
- filevardef(outcdf, "lat", typeof(lat), getvardims(lat))
- filevardef(outcdf, "lon", typeof(lon), getvardims(lon))
-
- ; Copy attributes associated with each variable to the file
- filevarattdef(outcdf, "time", time)
- filevarattdef(outcdf, "z_l", z_l)
- filevarattdef(outcdf, "z_i", z_i)
- ;filevarattdef(outcdf, "z_T", z_T)
- filevarattdef(outcdf, "lat", lat)
- filevarattdef(outcdf, "lon", lon)
-
- ; predefine variables
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vardims = varlist(nv,3)
- if(valid(nv))then
- if(vardims .eq. "2")then
- odims = (/"time", "lat", "lon"/)
- else
- odims = (/"time", "z_l", "lat", "lon"/)
- end if
- ;print("creating variable "+varname+" in file")
- filevardef(outcdf, varname, "float", odims)
- delete(odims)
- end if
- end do
-
- do nv = 0,nvpairs-1
- do nn = 0,1
- vecname = veclist(nv,0,nn)
- vecdims = veclist(nv,3,nn)
- if(vecdims .eq. "2")then
- odims = (/"time", "lat", "lon"/)
- else
- odims = (/"time", "z_l", "lat", "lon"/)
- end if
- ;print("creating variable "+vecname+" in file")
- filevardef(outcdf, vecname, "float", odims)
- delete(odims)
- delete(vecdims)
- end do
- end do
-
- ; explicitly exit file definition mode.
- setfileoption(outcdf,"DefineMode",False)
-
- ; write the dimensions to the file
- outcdf->time = (/time/)
- outcdf->z_l = (/z_l/)
- outcdf->z_i = (/z_i/)
-; outcdf->z_T = (/z_T/)
-;
- outcdf->lat = (/lat/)
- outcdf->lon = (/lon/)
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vargrid = varlist(nv,1)
- varmeth = varlist(nv,2)
- vardims = varlist(nv,3)
-
- if(valid(nv))then
- ;print(nv+" "+varname+" "+vargrid+" "+varmeth)
- ocnvar = ocnf[ii]->$varname$
- ndims = dimsizes(dimsizes(ocnvar))
- ;print(ndims+" "+dimsizes(ocnvar))
-
- if(vargrid .ne. "Ct")then
- ; print error if the variable is not on the Ct grid
- print("Variable is not on Ct grid")
- exit
- end if
-
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
- rgtt = ESMF_regrid_with_weights(ocnvar,wgtsfile,False)
- if(vardims .eq. "2")then
- rgtt = where(ismissing(rgmask2d),ocnvar@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
- else
- rgtt = where(ismissing(rgmask3d),ocnvar@_FillValue,rgtt)
- rgtt=rgtt(:,:,::-1,:)
- end if
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, varname, rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$varname$ = (/rgtt/)
-
- delete(ocnvar)
- delete(rgtt)
-
- ; variable exists
- end if
- ; nv, loop over number of variables
- end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-
- ;nv = 2
- do nv = 0,nvpairs-1
- vecnames = veclist(nv,0,:)
- vecgrids = veclist(nv,1,:)
- vecmeth = veclist(nv,2,:)
- vecdims = veclist(nv,3,:)
- ;print(nv+" "+vecnames+" "+vecgrids+" "+vecmeth)
-
- ; create a vector pair list
- vecpairs = NewList("fifo")
- n = 0
- uvel = ocnf[ii]->$vecnames(n)$
- vecfld = where(ismissing(uvel),0.0,uvel)
- copy_VarAtts(uvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(ut@remap)
-
- n = 1
- vvel = ocnf[ii]->$vecnames(n)$
- vecfld = where(ismissing(vvel),0.0,vvel)
- copy_VarAtts(vvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(vt@remap)
-
- ListAppend(vecpairs,ut)
- ListAppend(vecpairs,vt)
- ;print(vecpairs)
-
- ; rotate
- ; first copy Metadata
- urot = vecpairs[0]
- vrot = vecpairs[1]
- if(vecdims(0) .eq. "2")then
- urot = ut*cosrot2d + vt*sinrot2d
- vrot = vt*cosrot2d - ut*sinrot2d
- else
- urot = ut*cosrot3d + vt*sinrot3d
- vrot = vt*cosrot3d - ut*sinrot3d
- end if
- ; change attribute to indicate these are now rotated velocities
- urot@long_name=str_sub_str(urot@long_name,"X","Zonal")
- vrot@long_name=str_sub_str(vrot@long_name,"Y","Meridional")
- ; copy back
- vecpairs[0] = urot
- vecpairs[1] = vrot
- delete([/urot, vrot/])
-
- ; remap
- do n = 0,1
- vecfld = vecpairs[n]
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
- rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- if(vecdims(n) .eq. "2")then
- rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
- else
- rgtt = where(ismissing(rgmask3d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,:,::-1,:)
- end if
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, vecnames(n), rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$vecnames(n)$ = (/rgtt/)
- delete(rgtt)
- end do
- delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
- delete([/vecnames,vecgrids,vecmeth,vecdims/])
- ; nv, loop over number of vector pairs
- end do
-
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-
- delete(outcdf)
- ; rename mld to ePBL if required
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- ; if(varname .eq. "mld" .and. valid(nv))then
- if(varname .eq. "MLD_003" .and. valid(nv))then
- print("Renaming MLD_003 to mld")
- ;print(ncocmd+" "+outfile)
- system(ncocmd+" "+outfile)
- end if
- end do
-
- ; ii, loop over files
-;
- ;jj, loop over destination grids
- delete([/lat,lon,nlon,nlat/])
- delete([/rgmask2d,rgmask3d/])
- end do
- print("One complete ocn file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh
index 3f1798d3e9..d749e6d890 100755
--- a/ush/parsing_namelists_CICE.sh
+++ b/ush/parsing_namelists_CICE.sh
@@ -61,8 +61,8 @@ local CICE_RESTART_DIR="./CICE_RESTART/"
local CICE_RESTART_FILE="cice_model.res"
local CICE_DUMPFREQ="y" # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years"
local CICE_DUMPFREQ_N=10000 # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure
-local CICE_DIAGFREQ=6
-local CICE_HISTFREQ_N="0, 0, ${FHOUT}, 1, 1"
+local CICE_DIAGFREQ=$(( 86400 / DT_CICE )) # frequency of diagnostic output in timesteps, recommended for 1x per day
+local CICE_HISTFREQ_N="0, 0, ${FHOUT_OCNICE}, 1, 1"
if [[ "${RUN}" =~ "gdas" ]]; then
local CICE_HIST_AVG=".false., .false., .false., .false., .false." # DA needs instantaneous
else
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 83e0c10525..88a0065076 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -9,6 +9,8 @@
## This script is a direct execution.
#####
+# Disable variable not used warnings
+# shellcheck disable=SC2034
FV3_namelists(){
# setup the tables
@@ -33,7 +35,15 @@ if [[ -n "${AERO_DIAG_TABLE:-}" ]]; then
cat "${AERO_DIAG_TABLE}"
fi
cat "${DIAG_TABLE_APPEND}"
-} >> diag_table
+} >> diag_table_template
+
+local template=diag_table_template
+local SYEAR=${current_cycle:0:4}
+local SMONTH=${current_cycle:4:2}
+local SDAY=${current_cycle:6:2}
+local CHOUR=${current_cycle:8:2}
+source "${HOMEgfs}/ush/atparse.bash"
+atparse < "${template}" >> "diag_table"
# copy data table
diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh
index 2071586905..bec5c8f0f6 100755
--- a/ush/parsing_ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -1,20 +1,15 @@
#! /usr/bin/env bash
#####
-## This script writes ufs.configure file
-## first, select a "*.IN" templates based on
-## $confignamevarforufs and parse values based on
-## $cpl** switches.
-##
-## This is a child script of modular
-## forecast script. This script is definition only (Is it? There is nothing defined here being used outside this script.)
+## This script writes ufs.configure file based on a template defined in
+## ${ufs_configure_template}
#####
# Disable variable not used warnings
# shellcheck disable=SC2034
writing_ufs_configure() {
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh begins"
+echo "SUB ${FUNCNAME[0]}: ufs.configure begins"
# Setup ufs.configure
local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE
@@ -24,14 +19,13 @@ local cap_dbug_flag=${cap_dbug_flag:-0}
# Determine "cmeps_run_type" based on the availability of the mediator restart file
# If it is a warm_start, we already copied the mediator restart to DATA, if it was present
# If the mediator restart was not present, despite being a "warm_start", we put out a WARNING
-# in forecast_postdet.sh
+# in forecast_postdet.sh function CMEPS_postdet
if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then
local cmeps_run_type='continue'
else
local cmeps_run_type='startup'
fi
-
# Atm-related
local atm_model="fv3"
local atm_petlist_bounds="0 $(( ATMPETS-1 ))"
@@ -54,7 +48,7 @@ if [[ "${cplflx}" = ".true." ]]; then
local ocn_petlist_bounds="${ATMPETS} $(( ATMPETS+OCNPETS-1 ))"
local ocn_omp_num_threads="${OCNTHREADS}"
local RUNTYPE="${cmeps_run_type}"
- local CMEPS_RESTART_DIR="RESTART/"
+ local CMEPS_RESTART_DIR="CMEPS_RESTART/"
local CPLMODE="${cplmode}"
local coupling_interval_fast_sec="${CPL_FAST}"
local RESTART_N="${restart_interval}"
@@ -95,6 +89,8 @@ fi
if [[ ! -r "${ufs_configure_template}" ]]; then
echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!"
exit 1
+else
+ echo "INFO: using ufs.configure template: '${ufs_configure_template}'"
fi
source "${HOMEgfs}/ush/atparse.bash"
@@ -105,6 +101,6 @@ cat ufs.configure
${NCP} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh ends for ${ufs_configure_template}"
+echo "SUB ${FUNCNAME[0]}: ufs.configure ends"
}
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index cfd1fb2206..b562eeee4e 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -99,7 +99,7 @@ def get_bias_dict(self) -> Dict[str, Any]:
obdir = os.path.dirname(obfile)
basename = os.path.basename(obfile)
prefix = '.'.join(basename.split('.')[:-2])
- for file in ['satbias.nc4', 'satbias_cov.nc4', 'tlapse.txt']:
+ for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']:
bfile = f"{prefix}.{file}"
copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)])
@@ -311,13 +311,13 @@ def tgz_diags(statfile: str, diagdir: str) -> None:
Parameters
----------
statfile : str | os.PathLike
- Path to the output .tar.gz .tgz file that will contain the diag*.nc4 files e.g. atmstat.tgz
+ Path to the output .tar.gz .tgz file that will contain the diag*.nc files e.g. atmstat.tgz
diagdir : str | os.PathLike
Directory containing JEDI diag files
"""
# get list of diag files to put in tarball
- diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc4'))
+ diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc'))
logger.info(f"Compressing {len(diags)} diag files to {statfile}")
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index da41574fc9..7e2ae87b6b 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -94,7 +94,7 @@ def initialize(self: Analysis) -> None:
'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles']
for key in keys:
localconf[key] = self.task_config[key]
- localconf.RUN = 'enkf' + self.task_config.RUN
+ localconf.RUN = 'enkfgdas'
localconf.dirname = 'ens'
FileHandler(self.get_fv3ens_dict(localconf)).sync()
@@ -152,7 +152,7 @@ def finalize(self: Analysis) -> None:
atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat")
# get list of diag files to put in tarball
- diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4'))
+ diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc'))
logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 9cf84c07c7..7b81a10f32 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -188,7 +188,7 @@ def finalize(self: Analysis) -> None:
atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat")
# get list of diag files to put in tarball
- diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4'))
+ diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc'))
logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz")
diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py
new file mode 100644
index 0000000000..c865a9f408
--- /dev/null
+++ b/ush/python/pygfs/task/oceanice_products.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python3
+
+import os
+from logging import getLogger
+from typing import List, Dict, Any
+from pprint import pformat
+import xarray as xr
+
+from wxflow import (AttrDict,
+ parse_j2yaml,
+ FileHandler,
+ Jinja,
+ logit,
+ Task,
+ add_to_datetime, to_timedelta,
+ WorkflowException,
+ Executable)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class OceanIceProducts(Task):
+ """Ocean Ice Products Task
+ """
+
+ VALID_COMPONENTS = ['ocean', 'ice']
+ COMPONENT_RES_MAP = {'ocean': 'OCNRES', 'ice': 'ICERES'}
+ VALID_PRODUCT_GRIDS = {'mx025': ['1p00', '0p25'],
+ 'mx050': ['1p00', '0p50'],
+ 'mx100': ['1p00'],
+ 'mx500': ['5p00']}
+
+ # These could be read from the yaml file
+ TRIPOLE_DIMS_MAP = {'mx025': [1440, 1080], 'mx050': [720, 526], 'mx100': [360, 320], 'mx500': [72, 35]}
+ LATLON_DIMS_MAP = {'0p25': [1440, 721], '0p50': [720, 361], '1p00': [360, 181], '5p00': [72, 36]}
+
+ @logit(logger, name="OceanIceProducts")
+ def __init__(self, config: Dict[str, Any]) -> None:
+        """Constructor for the Ocean/Ice Products task
+
+ Parameters
+ ----------
+ config : Dict[str, Any]
+ Incoming configuration for the task from the environment
+
+ Returns
+ -------
+ None
+ """
+ super().__init__(config)
+
+ if self.config.COMPONENT not in self.VALID_COMPONENTS:
+ raise NotImplementedError(f'{self.config.COMPONENT} is not a valid model component.\n' +
+ 'Valid model components are:\n' +
+ f'{", ".join(self.VALID_COMPONENTS)}')
+
+ model_grid = f"mx{self.config[self.COMPONENT_RES_MAP[self.config.COMPONENT]]:03d}"
+
+ valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H"))
+
+ # TODO: This is a bit of a hack, but it works for now
+ # FIXME: find a better way to provide the averaging period
+ # This will be different for ocean and ice, so when they are made flexible, this will need to be addressed
+ avg_period = f"{self.config.FORECAST_HOUR-self.config.FHOUT_OCNICE_GFS:03d}-{self.config.FORECAST_HOUR:03d}"
+
+ localdict = AttrDict(
+ {'component': self.config.COMPONENT,
+ 'forecast_hour': self.config.FORECAST_HOUR,
+ 'valid_datetime': valid_datetime,
+ 'avg_period': avg_period,
+ 'model_grid': model_grid,
+ 'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]}
+ )
+ self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+
+ # Read the oceanice_products.yaml file for common configuration
+ logger.info(f"Read the ocean ice products configuration yaml file {self.config.OCEANICEPRODUCTS_CONFIG}")
+ self.task_config.oceanice_yaml = parse_j2yaml(self.config.OCEANICEPRODUCTS_CONFIG, self.task_config)
+ logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}")
+
+ @staticmethod
+ @logit(logger)
+ def initialize(config: Dict) -> None:
+ """Initialize the work directory by copying all the common fix data
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ # Copy static data to run directory
+ logger.info("Copy static data to run directory")
+ FileHandler(config.oceanice_yaml.ocnicepost.fix_data).sync()
+
+ # Copy "component" specific model data to run directory (e.g. ocean/ice forecast output)
+ logger.info(f"Copy {config.component} data to run directory")
+ FileHandler(config.oceanice_yaml[config.component].data_in).sync()
+
+ @staticmethod
+ @logit(logger)
+ def configure(config: Dict, product_grid: str) -> None:
+ """Configure the namelist for the product_grid in the work directory.
+ Create namelist 'ocnicepost.nml' from template
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ product_grid : str
+ Target product grid to process
+
+ Returns
+ -------
+ None
+ """
+
+ # Make a localconf with the "component" specific configuration for parsing the namelist
+ localconf = AttrDict()
+ localconf.DATA = config.DATA
+ localconf.component = config.component
+
+ localconf.source_tripole_dims = ', '.join(map(str, OceanIceProducts.TRIPOLE_DIMS_MAP[config.model_grid]))
+ localconf.target_latlon_dims = ', '.join(map(str, OceanIceProducts.LATLON_DIMS_MAP[product_grid]))
+
+ localconf.maskvar = config.oceanice_yaml[config.component].namelist.maskvar
+ localconf.sinvar = config.oceanice_yaml[config.component].namelist.sinvar
+ localconf.cosvar = config.oceanice_yaml[config.component].namelist.cosvar
+ localconf.angvar = config.oceanice_yaml[config.component].namelist.angvar
+ localconf.debug = ".true." if config.oceanice_yaml.ocnicepost.namelist.debug else ".false."
+
+ logger.debug(f"localconf:\n{pformat(localconf)}")
+
+ # Configure the namelist and write to file
+ logger.info("Create namelist for ocnicepost.x")
+ nml_template = os.path.join(localconf.DATA, "ocnicepost.nml.jinja2")
+ nml_data = Jinja(nml_template, localconf).render
+ logger.debug(f"ocnicepost_nml:\n{nml_data}")
+ nml_file = os.path.join(localconf.DATA, "ocnicepost.nml")
+ with open(nml_file, "w") as fho:
+ fho.write(nml_data)
+
+ @staticmethod
+ @logit(logger)
+ def execute(config: Dict, product_grid: str) -> None:
+ """Run the ocnicepost.x executable to interpolate and convert to grib2
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ product_grid : str
+ Target product grid to process
+
+ Returns
+ -------
+ None
+ """
+
+ # Run the ocnicepost.x executable
+ OceanIceProducts.interp(config.DATA, config.APRUN_OCNICEPOST, exec_name="ocnicepost.x")
+
+ # Convert interpolated netCDF file to grib2
+ OceanIceProducts.netCDF_to_grib2(config, product_grid)
+
+ @staticmethod
+ @logit(logger)
+ def interp(workdir: str, aprun_cmd: str, exec_name: str = "ocnicepost.x") -> None:
+ """
+ Run the interpolation executable to generate rectilinear netCDF file
+
+ Parameters
+ ----------
+        workdir : str
+            Working directory for the task; the process changes its
+            current working directory to this path before launching
+            the executable
+ aprun_cmd : str
+ aprun command to use
+ exec_name : str
+ Name of the executable e.g. ocnicepost.x
+
+ Returns
+ -------
+ None
+ """
+ os.chdir(workdir)
+ logger.debug(f"Current working directory: {os.getcwd()}")
+
+ exec_cmd = Executable(aprun_cmd)
+ exec_cmd.add_default_arg(os.path.join(workdir, exec_name))
+
+ OceanIceProducts._call_executable(exec_cmd)
+
+ @staticmethod
+ @logit(logger)
+ def netCDF_to_grib2(config: Dict, grid: str) -> None:
+ """Convert interpolated netCDF file to grib2
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ grid : str
+ Target product grid to process
+
+ Returns
+        -------
+ None
+ """
+
+ os.chdir(config.DATA)
+
+ exec_cmd = Executable(config.oceanice_yaml.nc2grib2.script)
+ arguments = [config.component, grid, config.current_cycle.strftime("%Y%m%d%H"), config.avg_period]
+ if config.component == 'ocean':
+ levs = config.oceanice_yaml.ocean.namelist.ocean_levels
+ arguments.append(':'.join(map(str, levs)))
+
+ logger.info(f"Executing {exec_cmd} with arguments {arguments}")
+ try:
+ exec_cmd(*arguments)
+ except OSError:
+ logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+ raise OSError(f"{exec_cmd}")
+ except Exception:
+ logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+ raise WorkflowException(f"{exec_cmd}")
+
+ @staticmethod
+ @logit(logger)
+ def subset(config: Dict) -> None:
+ """
+ Subset a list of variables from a netcdf file and save to a new netcdf file.
+ Also save global attributes and history from the old netcdf file into new netcdf file
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ os.chdir(config.DATA)
+
+ input_file = f"{config.component}.nc"
+ output_file = f"{config.component}_subset.nc"
+ varlist = config.oceanice_yaml[config.component].subset
+
+ logger.info(f"Subsetting {varlist} from {input_file} to {output_file}")
+
+ try:
+ # open the netcdf file
+ ds = xr.open_dataset(input_file)
+
+ # subset the variables
+ ds_subset = ds[varlist]
+
+ # save global attributes from the old netcdf file into new netcdf file
+ ds_subset.attrs = ds.attrs
+
+ # save subsetted variables to a new netcdf file
+ ds_subset.to_netcdf(output_file)
+
+ except FileNotFoundError:
+ logger.exception(f"FATAL ERROR: Input file not found: {input_file}")
+ raise FileNotFoundError(f"File not found: {input_file}")
+
+ except IOError as err:
+ logger.exception(f"FATAL ERROR: IOError occurred during netCDF subset: {input_file}")
+ raise IOError(f"An I/O error occurred: {err}")
+
+ except Exception as err:
+ logger.exception(f"FATAL ERROR: Error occurred during netCDF subset: {input_file}")
+ raise WorkflowException(f"{err}")
+
+ finally:
+ # close the netcdf files
+ ds.close()
+ ds_subset.close()
+
+ @staticmethod
+ @logit(logger)
+ def _call_executable(exec_cmd: Executable) -> None:
+ """Internal method to call executable
+
+ Parameters
+ ----------
+ exec_cmd : Executable
+ Executable to run
+
+ Raises
+ ------
+ OSError
+ Failure due to OS issues
+ WorkflowException
+ All other exceptions
+ """
+
+ logger.info(f"Executing {exec_cmd}")
+ try:
+ exec_cmd()
+ except OSError:
+ logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+ raise OSError(f"{exec_cmd}")
+ except Exception:
+ logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+ raise WorkflowException(f"{exec_cmd}")
+
+ @staticmethod
+ @logit(logger)
+ def finalize(config: Dict) -> None:
+ """Perform closing actions of the task.
+ Copy data back from the DATA/ directory to COM/
+
+ Parameters
+ ----------
+ config: Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ # Copy "component" specific generated data to COM/ directory
+ data_out = config.oceanice_yaml[config.component].data_out
+
+ logger.info(f"Copy processed data to COM/ directory")
+ FileHandler(data_out).sync()
diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/snow_analysis.py
similarity index 92%
rename from ush/python/pygfs/task/land_analysis.py
rename to ush/python/pygfs/task/snow_analysis.py
index 821caf2305..01c69dbc7b 100644
--- a/ush/python/pygfs/task/land_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -21,14 +21,14 @@
logger = getLogger(__name__.split('.')[-1])
-class LandAnalysis(Analysis):
+class SnowAnalysis(Analysis):
"""
- Class for global land analysis tasks
+ Class for global snow analysis tasks
"""
- NMEM_LANDENS = 2
+ NMEM_SNOWENS = 2
- @logit(logger, name="LandAnalysis")
+ @logit(logger, name="SnowAnalysis")
def __init__(self, config):
super().__init__(config)
@@ -43,8 +43,8 @@ def __init__(self, config):
'npy_ges': _res + 1,
'npz_ges': self.config.LEVS - 1,
'npz': self.config.LEVS - 1,
- 'LAND_WINDOW_BEGIN': _window_begin,
- 'LAND_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
+ 'SNOW_WINDOW_BEGIN': _window_begin,
+ 'SNOW_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
'jedi_yaml': _letkfoi_yaml
@@ -56,9 +56,9 @@ def __init__(self, config):
@logit(logger)
def prepare_GTS(self) -> None:
- """Prepare the GTS data for a global land analysis
+ """Prepare the GTS data for a global snow analysis
- This method will prepare GTS data for a global land analysis using JEDI.
+ This method will prepare GTS data for a global snow analysis using JEDI.
This includes:
- processing GTS bufr snow depth observation data to IODA format
@@ -133,9 +133,9 @@ def _gtsbufr2iodax(exe, yaml_file):
@logit(logger)
def prepare_IMS(self) -> None:
- """Prepare the IMS data for a global land analysis
+ """Prepare the IMS data for a global snow analysis
- This method will prepare IMS data for a global land analysis using JEDI.
+ This method will prepare IMS data for a global snow analysis using JEDI.
This includes:
- staging model backgrounds
- processing raw IMS observation data and prepare for conversion to IODA
@@ -153,7 +153,7 @@ def prepare_IMS(self) -> None:
# create a temporary dict of all keys needed in this method
localconf = AttrDict()
keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
- 'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
+ 'OPREFIX', 'CASE', 'OCNRES', 'ntiles', 'FIXgfs']
for key in keys:
localconf[key] = self.task_config[key]
@@ -232,7 +232,7 @@ def prepare_IMS(self) -> None:
@logit(logger)
def initialize(self) -> None:
- """Initialize method for Land analysis
+ """Initialize method for snow analysis
This method:
- creates artifacts in the DATA directory by copying fix files
- creates the JEDI LETKF yaml from the template
@@ -241,7 +241,7 @@ def initialize(self) -> None:
Parameters
----------
self : Analysis
- Instance of the LandAnalysis object
+ Instance of the SnowAnalysis object
"""
super().initialize()
@@ -255,12 +255,12 @@ def initialize(self) -> None:
# Make member directories in DATA for background
dirlist = []
- for imem in range(1, LandAnalysis.NMEM_LANDENS + 1):
+ for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1):
dirlist.append(os.path.join(localconf.DATA, 'bkg', f'mem{imem:03d}'))
FileHandler({'mkdir': dirlist}).sync()
# stage fix files
- jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'land_jedi_fix.yaml')
+ jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml')
logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}")
jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config)
FileHandler(jedi_fix_list).sync()
@@ -294,15 +294,15 @@ def execute(self) -> None:
Parameters
----------
self : Analysis
- Instance of the LandAnalysis object
+ Instance of the SnowAnalysis object
"""
# create a temporary dict of all keys needed in this method
localconf = AttrDict()
keys = ['HOMEgfs', 'DATA', 'current_cycle',
- 'COM_ATMOS_RESTART_PREV', 'COM_LAND_ANALYSIS', 'APREFIX',
+ 'COM_ATMOS_RESTART_PREV', 'COM_SNOW_ANALYSIS', 'APREFIX',
'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'OCNRES', 'ntiles',
- 'APRUN_LANDANL', 'JEDIEXE', 'jedi_yaml',
+ 'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml',
'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR']
for key in keys:
localconf[key] = self.task_config[key]
@@ -314,7 +314,7 @@ def execute(self) -> None:
logger.info("Running JEDI LETKF")
self.execute_jediexe(localconf.DATA,
- localconf.APRUN_LANDANL,
+ localconf.APRUN_SNOWANL,
os.path.basename(localconf.JEDIEXE),
localconf.jedi_yaml)
@@ -323,7 +323,7 @@ def execute(self) -> None:
@logit(logger)
def finalize(self) -> None:
- """Performs closing actions of the Land analysis task
+ """Performs closing actions of the Snow analysis task
This method:
- tar and gzip the output diag files and place in COM/
- copy the generated YAML file from initialize to the COM/
@@ -333,11 +333,11 @@ def finalize(self) -> None:
Parameters
----------
self : Analysis
- Instance of the LandAnalysis object
+ Instance of the SnowAnalysis object
"""
logger.info("Create diagnostic tarball of diag*.nc4 files")
- statfile = os.path.join(self.task_config.COM_LAND_ANALYSIS, f"{self.task_config.APREFIX}landstat.tgz")
+ statfile = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat.tgz")
self.tgz_diags(statfile, self.task_config.DATA)
logger.info("Copy full YAML to COM")
@@ -355,17 +355,17 @@ def finalize(self) -> None:
for itile in range(1, self.task_config.ntiles + 1):
filename = template.format(tilenum=itile)
src = os.path.join(self.task_config.DATA, 'anl', filename)
- dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename)
+ dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename)
anllist.append([src, dest])
FileHandler({'copy': anllist}).sync()
logger.info('Copy increments to COM')
- template = f'landinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
+ template = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
inclist = []
for itile in range(1, self.task_config.ntiles + 1):
filename = template.format(tilenum=itile)
src = os.path.join(self.task_config.DATA, 'anl', filename)
- dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename)
+ dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename)
inclist.append([src, dest])
FileHandler({'copy': inclist}).sync()
@@ -375,7 +375,7 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]:
"""Compile a dictionary of model background files to copy
This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data)
- that are needed for global land DA and returns said dictionary for use by the FileHandler class.
+ that are needed for global snow DA and returns said dictionary for use by the FileHandler class.
Parameters
----------
@@ -401,11 +401,11 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]:
# Start accumulating list of background files to copy
bkglist = []
- # land DA needs coupler
+ # snow DA needs coupler
basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
- # land DA only needs sfc_data
+ # snow DA only needs sfc_data
for ftype in ['sfc_data']:
template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
for itile in range(1, config.ntiles + 1):
@@ -447,17 +447,17 @@ def get_ens_bkg_dict(config: Dict) -> Dict:
# get FV3 sfc_data RESTART files; Note an ensemble is being created
rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV)
- for imem in range(1, LandAnalysis.NMEM_LANDENS + 1):
+ for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1):
memchar = f"mem{imem:03d}"
run_dir = os.path.join(config.DATA, 'bkg', memchar, 'RESTART')
dirlist.append(run_dir)
- # Land DA needs coupler
+ # Snow DA needs coupler
basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
- # Land DA only needs sfc_data
+ # Snow DA only needs sfc_data
for ftype in ['sfc_data']:
template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
for itile in range(1, config.ntiles + 1):
@@ -491,7 +491,7 @@ def create_ensemble(vname: str, bestddev: float, config: Dict) -> None:
"""
# 2 ens members
- offset = bestddev / np.sqrt(LandAnalysis.NMEM_LANDENS)
+ offset = bestddev / np.sqrt(SnowAnalysis.NMEM_SNOWENS)
logger.info(f"Creating ensemble for LETKFOI by offsetting with {offset}")
diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh
index f68d7c88cc..1066627787 100755
--- a/ush/radmon_verf_angle.sh
+++ b/ush/radmon_verf_angle.sh
@@ -83,7 +83,6 @@ which prep_step
which startmsg
# File names
-export pgmout=${pgmout:-${jlogfile}}
touch "${pgmout}"
# Other variables
diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh
index ab1058711e..b6fd82dff2 100755
--- a/ush/radmon_verf_bcoef.sh
+++ b/ush/radmon_verf_bcoef.sh
@@ -69,7 +69,6 @@ fi
echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}"
# File names
-pgmout=${pgmout:-${jlogfile}}
touch "${pgmout}"
# Other variables
diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh
index f1f97c247e..7aec48e2b3 100755
--- a/ush/radmon_verf_bcor.sh
+++ b/ush/radmon_verf_bcor.sh
@@ -65,7 +65,6 @@ source "${HOMEgfs}/ush/preamble.sh"
####################################################################
# File names
-pgmout=${pgmout:-${jlogfile}}
touch "${pgmout}"
# Other variables
diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh
index 7f98407ec5..0df8029166 100755
--- a/ush/radmon_verf_time.sh
+++ b/ush/radmon_verf_time.sh
@@ -75,8 +75,6 @@ source "${HOMEgfs}/ush/preamble.sh"
####################################################################
# File names
-#pgmout=${pgmout:-${jlogfile}}
-#touch $pgmout
radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh}
base_file=${base_file:-${PARMmonitor}/gdas_radmon_base.tar}
diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh
index c17067ff72..3ea56db72a 100755
--- a/ush/syndat_getjtbul.sh
+++ b/ush/syndat_getjtbul.sh
@@ -22,10 +22,6 @@
# TANK_TROPCY - path to home directory containing tropical cyclone record
# data base
-# Imported variables that can be passed in:
-# jlogfile - path to job log file (skipped over by this script if not
-# passed in)
-
source "$HOMEgfs/ush/preamble.sh"
EXECSYND=${EXECSYND:-${HOMESYND}/exec}
@@ -52,8 +48,6 @@ hour=$(echo $CDATE10 | cut -c9-10)
echo $PDYm1
pdym1=$PDYm1
-#pdym1=$(sh $utilscript/finddate.sh $pdy d-1)
-
echo " " >> $pgmout
echo "Entering sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \
>> $pgmout
diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh
index cda9030577..9d9b2ad69b 100755
--- a/ush/syndat_qctropcy.sh
+++ b/ush/syndat_qctropcy.sh
@@ -46,7 +46,6 @@
# subsequent program SYNDAT_SYNDATA)
# PARMSYND - path to syndat parm field directory
# EXECSYND - path to syndat executable directory
-# FIXam - path to syndat fix field directory
# USHSYND - path to syndat ush directory
# Imported variables that can be passed in:
@@ -59,7 +58,7 @@
# data base
# (Default: /dcom/us007003)
# slmask - path to t126 32-bit gaussian land/sea mask file
-# (Default: $FIXam/syndat_slmask.t126.gaussian)
+# (Default: ${FIXgfs}/am/syndat_slmask.t126.gaussian)
# copy_back - switch to copy updated files back to archive directory and
# to tcvitals directory
# (Default: YES)
@@ -74,12 +73,11 @@ HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep}
HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep}
TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003}
-FIXam=${FIXam:-$HOMEgfs/fix/am}
USHSYND=${USHSYND:-$HOMEgfs/ush}
EXECSYND=${EXECSYND:-$HOMEgfs/exec}
PARMSYND=${PARMSYND:-$HOMEgfs/parm/relo}
-slmask=${slmask:-$FIXam/syndat_slmask.t126.gaussian}
+slmask=${slmask:-${FIXgfs}/am/syndat_slmask.t126.gaussian}
copy_back=${copy_back:-YES}
files_override=${files_override:-""}
@@ -190,10 +188,10 @@ fi
echo " &INPUT RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp
cat $PARMSYND/syndat_qctropcy.${RUN}.parm >> vitchk.inp
-# Copy the fixed fields from FIXam
+# Copy the fixed fields
-cp $FIXam/syndat_fildef.vit fildef.vit
-cp $FIXam/syndat_stmnames stmnames
+cp ${FIXgfs}/am/syndat_fildef.vit fildef.vit
+cp ${FIXgfs}/am/syndat_stmnames stmnames
rm -f nhc fnoc lthistry
diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh
index 01a21bd12c..cdcc777c47 100755
--- a/ush/tropcy_relocate.sh
+++ b/ush/tropcy_relocate.sh
@@ -128,9 +128,6 @@
# Default is "${HOMERELO}/ush"
# EXECRELO String indicating directory path for RELOCATE executables
# Default is "${HOMERELO}/exec"
-# FIXRELO String indicating directory path for RELOCATE data fix-
-# field files
-# Default is "${HOMERELO}/fix"
# EXECUTIL String indicating directory path for utility program
# executables
# If the imported variable MACHINE!=sgi, then the default is
@@ -188,7 +185,7 @@
# programs :
# RELOCATE_MV_NVORTEX - executable $RELOX
# T126 GRIB global land/sea mask:
-# $FIXRELO/global_slmask.t126.grb
+# ${FIXgfs}/am/global_slmask.t126.grb
# SUPVIT - executable $SUPVX
# GETTRK - executable $GETTX
#
@@ -302,8 +299,6 @@ USHGETGES=${USHGETGES:-${USHRELO}}
EXECRELO=${EXECRELO:-${HOMERELO}/exec}
-FIXRELO=${FIXRELO:-${HOMERELO}/fix}
-
RELOX=${RELOX:-$EXECRELO/relocate_mv_nvortex}
export BKGFREQ=${BKGFREQ:-1}
diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh
index ede2318c4a..c4efad1d73 100755
--- a/ush/tropcy_relocate_extrkr.sh
+++ b/ush/tropcy_relocate_extrkr.sh
@@ -1538,9 +1538,9 @@ ln -s -f ${vdir}/trak.${cmodel}.radii.${symdh} fort.63
ln -s -f ${vdir}/trak.${cmodel}.atcfunix.${symdh} fort.64
if [ $BKGFREQ -eq 1 ]; then
- ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes_hrly fort.15
+ ln -s -f ${FIXgfs}/am/${cmodel}.tracker_leadtimes_hrly fort.15
elif [ $BKGFREQ -eq 3 ]; then
- ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes fort.15
+ ln -s -f ${FIXgfs}/am/${cmodel}.tracker_leadtimes fort.15
fi
##$XLF_LINKSSH
diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh
index c11a75f89d..e690cbc4ad 100755
--- a/ush/wave_grid_interp_sbs.sh
+++ b/ush/wave_grid_interp_sbs.sh
@@ -113,18 +113,18 @@ source "$HOMEgfs/ush/preamble.sh"
wht_OK='no'
if [ ! -f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ]; then
- if [ -f $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ]
+ if [ -f ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ]
then
set +x
echo ' '
- echo " Copying $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} "
+ echo " Copying ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} "
set_trace
- cp $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA}
+ cp ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA}
wht_OK='yes'
else
set +x
echo ' '
- echo " Not found: $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} "
+ echo " Not found: ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} "
fi
fi
# Check and link weights file
@@ -148,7 +148,7 @@ source "$HOMEgfs/ush/preamble.sh"
if [ "wht_OK" = 'no' ]
then
cp -f ./WHTGRIDINT.bin ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID}
- cp -f ./WHTGRIDINT.bin ${FIXwave}/ww3_gint.WHTGRIDINT.bin.${grdID}
+ cp -f ./WHTGRIDINT.bin ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID}
fi
diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh
index 5b1b212a16..390860c960 100755
--- a/ush/wave_grid_moddef.sh
+++ b/ush/wave_grid_moddef.sh
@@ -59,7 +59,7 @@ source "$HOMEgfs/ush/preamble.sh"
# 0.c Define directories and the search path.
# The tested variables should be exported by the postprocessor script.
- if [ -z "$grdID" ] || [ -z "$EXECwave" ] || [ -z "$wave_sys_ver" ]
+ if [ -z "$grdID" ] || [ -z "$EXECwave" ]
then
set +x
echo ' '
@@ -83,8 +83,16 @@ source "$HOMEgfs/ush/preamble.sh"
rm -f ww3_grid.inp
ln -sf ../ww3_grid.inp.$grdID ww3_grid.inp
+
+ if [ -f ../${grdID}.msh ]
+ then
+ rm -f ${grdID}.msh
+ ln -sf ../${grdID}.msh ${grdID}.msh
+ fi
+
+
- $EXECwave/ww3_grid 1> grid_${grdID}.out 2>&1
+ "${EXECwave}/ww3_grid" 1> "grid_${grdID}.out" 2>&1
err=$?
if [ "$err" != '0' ]
@@ -99,10 +107,10 @@ source "$HOMEgfs/ush/preamble.sh"
exit 3
fi
- if [ -f mod_def.ww3 ]
+ if [[ -f mod_def.ww3 ]]
then
cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}"
- mv mod_def.ww3 ../mod_def.$grdID
+ mv mod_def.ww3 "../mod_def.${grdID}"
else
set +x
echo ' '
@@ -118,6 +126,6 @@ source "$HOMEgfs/ush/preamble.sh"
# 3. Clean up
cd ..
-rm -rf moddef_$grdID
+rm -rf "moddef_${grdID}"
# End of ww3_mod_def.sh ------------------------------------------------- #
diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh
index 6b1ab19db2..a8f9f67d62 100755
--- a/ush/wave_prnc_cur.sh
+++ b/ush/wave_prnc_cur.sh
@@ -46,7 +46,7 @@ mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc"
# Convert to regular lat lon file
# If weights need to be regenerated due to CDO ver change, use:
# $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc
-cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc
+cp ${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc ./weights.nc
# Interpolate to regular 5 min grid
${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc"
diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh
index 5ec1d7fc2e..d0a6371d5b 100755
--- a/ush/wave_prnc_ice.sh
+++ b/ush/wave_prnc_ice.sh
@@ -55,7 +55,7 @@ source "$HOMEgfs/ush/preamble.sh"
echo "Making ice fields."
if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \
- [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \
+ [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECwave}" ]] || \
[[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COM_OBS}" ]]; then
set +x
diff --git a/versions/build.hera.ver b/versions/build.hera.ver
index ff85b1a801..263e967a97 100644
--- a/versions/build.hera.ver
+++ b/versions/build.hera.ver
@@ -1,3 +1,5 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev
source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.hercules.ver b/versions/build.hercules.ver
index 5513466631..cab0c92111 100644
--- a/versions/build.hercules.ver
+++ b/versions/build.hercules.ver
@@ -1,3 +1,6 @@
export stack_intel_ver=2021.9.0
export stack_impi_ver=2021.9.0
+export intel_mkl_ver=2023.1.0
+export spack_env=gsi-addon-env
source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.jet.ver b/versions/build.jet.ver
index ff85b1a801..55c0ea0bd1 100644
--- a/versions/build.jet.ver
+++ b/versions/build.jet.ver
@@ -1,3 +1,5 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev
source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.orion.ver b/versions/build.orion.ver
index ff85b1a801..df7856110d 100644
--- a/versions/build.orion.ver
+++ b/versions/build.orion.ver
@@ -1,3 +1,5 @@
-export stack_intel_ver=2021.5.0
+export stack_intel_ver=2022.0.2
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-env
source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.s4.ver b/versions/build.s4.ver
index a0aae51d87..e2731ccfb3 100644
--- a/versions/build.s4.ver
+++ b/versions/build.s4.ver
@@ -1,3 +1,5 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.0
+export spack_env=gsi-addon-env
source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.spack.ver b/versions/build.spack.ver
index fb5b244bf5..808f85dd16 100644
--- a/versions/build.spack.ver
+++ b/versions/build.spack.ver
@@ -1,5 +1,4 @@
-export spack_stack_ver=1.5.1
-export spack_env=gsi-addon
+export spack_stack_ver=1.6.0
export cmake_ver=3.23.1
@@ -11,7 +10,7 @@ export fms_ver=2023.02.01
export hdf5_ver=1.14.0
export netcdf_c_ver=4.9.2
-export netcdf_fortran_ver=4.6.0
+export netcdf_fortran_ver=4.6.1
export bacio_ver=2.4.1
export nemsio_ver=2.5.4
@@ -19,10 +18,10 @@ export sigio_ver=2.3.2
export w3emc_ver=2.10.0
export bufr_ver=11.7.0
export g2_ver=3.4.5
-export sp_ver=2.3.3
+export sp_ver=2.5.0
export ip_ver=4.3.0
export gsi_ncdiag_ver=1.1.2
export g2tmpl_ver=1.10.2
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
export wgrib2_ver=2.0.8
export grib_util_ver=1.3.0
diff --git a/versions/fix.ver b/versions/fix.ver
index 13d9b56dd2..a2a9caf8e3 100644
--- a/versions/fix.ver
+++ b/versions/fix.ver
@@ -10,6 +10,7 @@ export datm_ver=20220805
export gdas_crtm_ver=20220805
export gdas_fv3jedi_ver=20220805
export gdas_gsibec_ver=20221031
+export gdas_obs_ver=20240213
export glwu_ver=20220805
export gsi_ver=20230911
export lut_ver=20220805
@@ -17,6 +18,6 @@ export mom6_ver=20231219
export orog_ver=20231027
export reg2grb2_ver=20220805
export sfc_climo_ver=20220805
-export ugwd_ver=20220805
+export ugwd_ver=20231027
export verif_ver=20220805
export wave_ver=20240105
diff --git a/versions/run.hera.ver b/versions/run.hera.ver
index 43443ba715..4529d34821 100644
--- a/versions/run.hera.ver
+++ b/versions/run.hera.ver
@@ -1,13 +1,11 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev
export hpss_ver=hpss
export ncl_ver=6.6.2
export R_ver=3.5.0
export gempak_ver=7.4.2
-#For metplus jobs, not currently working with spack-stack
-#export met_ver=9.1.3
-#export metplus_ver=3.1.1
-
source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.hercules.ver b/versions/run.hercules.ver
index 43f1b2181d..549bb47df0 100644
--- a/versions/run.hercules.ver
+++ b/versions/run.hercules.ver
@@ -1,12 +1,9 @@
export stack_intel_ver=2021.9.0
export stack_impi_ver=2021.9.0
export intel_mkl_ver=2023.1.0
+export spack_env=gsi-addon-env
-export ncl_ver=6.6.2
export perl_ver=5.36.0
source "${HOMEgfs:-}/versions/run.spack.ver"
-
-# wgrib2 and cdo are different on Hercules from all the other systems
-export wgrib2_ver=3.1.1
-export cdo_ver=2.2.0
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.jet.ver b/versions/run.jet.ver
index 18a82cab4f..d5b98bf514 100644
--- a/versions/run.jet.ver
+++ b/versions/run.jet.ver
@@ -1,5 +1,6 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev
export hpss_ver=
export ncl_ver=6.6.2
@@ -7,3 +8,4 @@ export R_ver=4.0.2
export gempak_ver=7.4.2
source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.orion.ver b/versions/run.orion.ver
index 7671bc028d..2fdeae8888 100644
--- a/versions/run.orion.ver
+++ b/versions/run.orion.ver
@@ -1,11 +1,12 @@
export stack_intel_ver=2022.0.2
export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-env
export ncl_ver=6.6.2
export gempak_ver=7.5.1
-#For metplus jobs, not currently working with spack-stack
-#export met_ver=9.1.3
-#export metplus_ver=3.1.1
-
source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
+
+#cdo is older on Orion
+export cdo_ver=2.0.5
diff --git a/versions/run.s4.ver b/versions/run.s4.ver
index 56817ef439..6d0f4cbaca 100644
--- a/versions/run.s4.ver
+++ b/versions/run.s4.ver
@@ -1,6 +1,8 @@
export stack_intel_ver=2021.5.0
export stack_impi_ver=2021.5.0
+export spack_env=gsi-addon-env
export ncl_ver=6.4.0-precompiled
source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.spack.ver b/versions/run.spack.ver
index c1c13f58df..c00b7483cd 100644
--- a/versions/run.spack.ver
+++ b/versions/run.spack.ver
@@ -1,26 +1,31 @@
-export spack_stack_ver=1.5.1
-export spack_env=gsi-addon
-export python_ver=3.10.8
+export spack_stack_ver=1.6.0
+export python_ver=3.11.6
export jasper_ver=2.0.32
export libpng_ver=1.6.37
-export cdo_ver=2.0.5
+export cdo_ver=2.2.0
export nco_ver=5.0.6
export hdf5_ver=1.14.0
export netcdf_c_ver=4.9.2
-export netcdf_fortran_ver=4.6.0
+export netcdf_fortran_ver=4.6.1
export bufr_ver=11.7.0
export gsi_ncdiag_ver=1.1.2
export g2tmpl_ver=1.10.2
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
export wgrib2_ver=2.0.8
export grib_util_ver=1.3.0
-export prod_util_ver=1.2.2
+export prod_util_ver=2.1.1
export py_netcdf4_ver=1.5.8
-export py_pyyaml_ver=5.4.1
+export py_pyyaml_ver=6.0
export py_jinja2_ver=3.1.2
+export py_pandas_ver=1.5.3
+export py_python_dateutil_ver=2.8.2
+
+export met_ver=9.1.3
+export metplus_ver=3.1.1
+export py_xarray_ver=2023.7.0
export obsproc_run_ver=1.1.2
export prepobs_run_ver=1.0.1
diff --git a/versions/run.wcoss2.ver b/versions/run.wcoss2.ver
index a188cdea74..0aaad3ec3d 100644
--- a/versions/run.wcoss2.ver
+++ b/versions/run.wcoss2.ver
@@ -39,6 +39,8 @@ export g2tmpl_ver=1.10.2
export ncdiag_ver=1.0.0
export crtm_ver=2.4.0
export wgrib2_ver=2.0.8
+export met_ver=9.1.3
+export metplus_ver=3.1.1
# Development-only below
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index 9e8bb5c67e..0be4dc7124 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -20,7 +20,12 @@ def _get_app_configs(self):
configs += ['efcs']
if self.do_wave:
- configs += ['waveinit']
+ configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
+ if self.do_wave_bnd:
+ configs += ['wavepostbndpnt', 'wavepostbndpntbll']
+
+ if self.do_ocean or self.do_ice:
+ configs += ['oceanice_products']
return configs
@@ -45,6 +50,18 @@ def get_task_names(self):
if self.nens > 0:
tasks += ['efcs']
- tasks += ['atmprod']
+ tasks += ['atmos_prod']
+
+ if self.do_ocean:
+ tasks += ['ocean_prod']
+
+ if self.do_ice:
+ tasks += ['ice_prod']
+
+ if self.do_wave:
+ tasks += ['wavepostsbs']
+ if self.do_wave_bnd:
+ tasks += ['wavepostbndpnt', 'wavepostbndpntbll']
+ tasks += ['wavepostpnt']
return {f"{self._base['CDUMP']}": tasks}
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 6dd0342a78..51b430a996 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -16,7 +16,7 @@ def __init__(self, conf: Configuration):
self.do_jediatmvar = self._base.get('DO_JEDIATMVAR', False)
self.do_jediatmens = self._base.get('DO_JEDIATMENS', False)
self.do_jediocnvar = self._base.get('DO_JEDIOCNVAR', False)
- self.do_jedilandda = self._base.get('DO_JEDILANDDA', False)
+ self.do_jedisnowda = self._base.get('DO_JEDISNOWDA', False)
self.do_mergensst = self._base.get('DO_MERGENSST', False)
self.do_vrfy_oceanda = self._base.get('DO_VRFY_OCEANDA', False)
@@ -48,8 +48,8 @@ def _get_app_configs(self):
if self.do_vrfy_oceanda:
configs += ['ocnanalvrfy']
- if self.do_ocean:
- configs += ['ocnpost']
+ if self.do_ocean or self.do_ice:
+ configs += ['oceanice_products']
configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup']
@@ -105,8 +105,8 @@ def _get_app_configs(self):
if self.do_aero:
configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
- if self.do_jedilandda:
- configs += ['preplandobs', 'landanl']
+ if self.do_jedisnowda:
+ configs += ['prepsnowobs', 'snowanl']
if self.do_mos:
configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep',
@@ -148,8 +148,8 @@ def get_task_names(self):
if self.do_aero:
gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
- if self.do_jedilandda:
- gdas_gfs_common_tasks_before_fcst += ['preplandobs', 'landanl']
+ if self.do_jedisnowda:
+ gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl']
wave_prep_tasks = ['waveinit', 'waveprep']
wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll']
@@ -178,7 +178,7 @@ def get_task_names(self):
if self.do_upp:
gdas_tasks += ['atmupp']
- gdas_tasks += ['atmprod']
+ gdas_tasks += ['atmos_prod']
if self.do_wave and 'gdas' in self.wave_cdumps:
if self.do_wave_bnd:
@@ -210,9 +210,15 @@ def get_task_names(self):
gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst']
+ if self.do_ocean:
+ gfs_tasks += ['ocean_prod']
+
+ if self.do_ice:
+ gfs_tasks += ['ice_prod']
+
if self.do_upp:
gfs_tasks += ['atmupp']
- gfs_tasks += ['atmprod']
+ gfs_tasks += ['atmos_prod']
if self.do_goes:
gfs_tasks += ['goesupp']
diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index 1145863210..0a9648ee65 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -49,7 +49,7 @@ def _get_app_configs(self):
configs += ['awips']
if self.do_ocean or self.do_ice:
- configs += ['ocnpost']
+ configs += ['oceanice_products']
if self.do_wave:
configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
@@ -100,7 +100,7 @@ def get_task_names(self):
if self.do_upp:
tasks += ['atmupp']
- tasks += ['atmprod']
+ tasks += ['atmos_prod']
if self.do_goes:
tasks += ['goesupp']
@@ -126,8 +126,11 @@ def get_task_names(self):
if self.do_awips:
tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind']
- if self.do_ocean or self.do_ice:
- tasks += ['ocnpost']
+ if self.do_ocean:
+ tasks += ['ocean_prod']
+
+ if self.do_ice:
+ tasks += ['ice_prod']
if self.do_wave:
if self.do_wave_bnd:
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index a72753eb90..50b24f3578 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -75,7 +75,7 @@ def stage_ic(self):
def waveinit(self):
resources = self.get_resource('waveinit')
- task_name = f'waveinit'
+ task_name = f'wave_init'
task_dict = {'task_name': task_name,
'resources': resources,
'envars': self.envars,
@@ -90,14 +90,12 @@ def waveinit(self):
return task
def fcst(self):
-
- # TODO: Add real dependencies
dependencies = []
dep_dict = {'type': 'task', 'name': f'stage_ic'}
dependencies.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
- dep_dict = {'type': 'task', 'name': f'waveinit'}
+ dep_dict = {'type': 'task', 'name': f'wave_init'}
dependencies.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
@@ -124,7 +122,7 @@ def efcs(self):
dependencies.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
- dep_dict = {'type': 'task', 'name': f'waveinit'}
+ dep_dict = {'type': 'task', 'name': f'wave_init'}
dependencies.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
@@ -150,7 +148,7 @@ def efcs(self):
'maxtries': '&MAXTRIES;'
}
- member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(1, self.nmem + 1)])}
+ member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])}
metatask_dict = {'task_name': 'fcst_ens',
'var_dict': member_var_dict,
'task_dict': task_dict
@@ -160,46 +158,231 @@ def efcs(self):
return task
- def atmprod(self):
- atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"], {'MEMDIR': 'mem#member#'})
+ def atmos_prod(self):
+ return self._atmosoceaniceprod('atmos')
+
+ def ocean_prod(self):
+ return self._atmosoceaniceprod('ocean')
+
+ def ice_prod(self):
+ return self._atmosoceaniceprod('ice')
+
+ def _atmosoceaniceprod(self, component: str):
+
+ products_dict = {'atmos': {'config': 'atmos_products',
+ 'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2f#fhr#'},
+ 'ocean': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.f#fhr#.nc'},
+ 'ice': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.f#fhr#.nc'}}
+
+ component_dict = products_dict[component]
+ config = component_dict['config']
+ history_path_tmpl = component_dict['history_path_tmpl']
+ history_file_tmpl = component_dict['history_file_tmpl']
+
+ resources = self.get_resource(config)
+
+ history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl], {'MEMDIR': 'mem#member#'})
deps = []
- data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2f#fhr#'
+ data = f'{history_path}/{history_file_tmpl}'
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
- atm_prod_envars = self.envars.copy()
+ postenvars = self.envars.copy()
postenvar_dict = {'ENSMEM': '#member#',
'MEMDIR': 'mem#member#',
'FHRLST': '#fhr#',
+ 'COMPONENT': component}
+ for key, value in postenvar_dict.items():
+ postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ task_name = f'{component}_prod_mem#member#_f#fhr#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': postenvars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/{config}.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'}
+
+ fhrs = self._get_forecast_hours('gefs', self._configs[config])
+
+ # ocean/ice components do not have fhr 0 as they are averaged output
+ if component in ['ocean', 'ice']:
+ fhrs.remove(0)
+
+ fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+ fhr_metatask_dict = {'task_name': f'{component}_prod_#member#',
+ 'task_dict': task_dict,
+ 'var_dict': fhr_var_dict}
+
+ member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': f'{component}_prod',
+ 'task_dict': fhr_metatask_dict,
+ 'var_dict': member_var_dict}
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostsbs(self):
+ deps = []
+ for wave_grid in self._configs['wavepostsbs']['waveGRD'].split():
+ wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+ data = f'{wave_hist_path}/gefswave.out_grd.{wave_grid}.@Y@m@d.@H0000'
+ dep_dict = {'type': 'data', 'data': data}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+ wave_post_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
}
for key, value in postenvar_dict.items():
- atm_prod_envars.append(rocoto.create_envar(name=key, value=str(value)))
+ wave_post_envars.append(rocoto.create_envar(name=key, value=str(value)))
- resources = self.get_resource('atmos_products')
+ resources = self.get_resource('wavepostsbs')
- task_name = f'atm_prod_mem#member#_f#fhr#'
+ task_name = f'wave_post_grid_mem#member#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
- 'envars': atm_prod_envars,
+ 'envars': wave_post_envars,
'cycledef': 'gefs',
- 'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostsbs.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
}
- fhr_var_dict = {'fhr': ' '.join([str(fhr).zfill(3) for fhr in
- self._get_forecast_hours('gefs', self._configs['atmos_products'])])}
- fhr_metatask_dict = {'task_name': 'atm_prod_#member#',
- 'task_dict': task_dict,
- 'var_dict': fhr_var_dict
- }
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_grid',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostbndpnt(self):
+ deps = []
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep=deps)
+
+ wave_post_bndpnt_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_bndpnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostbndpnt')
+ task_name = f'wave_post_bndpnt_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_bndpnt_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpnt.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
- member_metatask_dict = {'task_name': 'atm_prod',
- 'task_dict': fhr_metatask_dict,
+ member_metatask_dict = {'task_name': 'wave_post_bndpnt',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostbndpntbll(self):
+ deps = []
+ atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+ # Is there any reason this is 180?
+ data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt'
+ dep_dict = {'type': 'data', 'data': data}
+ deps.append(rocoto.add_dependency(dep_dict))
+
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+
+ wave_post_bndpnt_bull_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_bndpnt_bull_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostbndpntbll')
+ task_name = f'wave_post_bndpnt_bull_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_bndpnt_bull_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpntbll.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
+
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_bndpnt_bull',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostpnt(self):
+ deps = []
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ if self.app_config.do_wave_bnd:
+ dep_dict = {'type': 'task', 'name': f'wave_post_bndpnt_bull_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+ wave_post_pnt_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_pnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostpnt')
+ task_name = f'wave_post_pnt_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_pnt_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostpnt.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
+
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_pnt',
+ 'task_dict': task_dict,
'var_dict': member_var_dict
}
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 9102c74e35..b91e974c74 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -99,7 +99,7 @@ def prep(self):
gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False
deps = []
- dep_dict = {'type': 'metatask', 'name': 'gdasatmprod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+ dep_dict = {'type': 'metatask', 'name': 'gdasatmos_prod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
deps.append(rocoto.add_dependency(dep_dict))
data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc'
dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
@@ -271,8 +271,8 @@ def sfcanl(self):
else:
dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'}
deps.append(rocoto.add_dependency(dep_dict))
- if self.app_config.do_jedilandda:
- dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'}
+ if self.app_config.do_jedisnowda:
+ dep_dict = {'type': 'task', 'name': f'{self.cdump}snowanl'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
else:
@@ -531,21 +531,21 @@ def aeroanlfinal(self):
return task
- def preplandobs(self):
+ def prepsnowobs(self):
deps = []
dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
- resources = self.get_resource('preplandobs')
- task_name = f'{self.cdump}preplandobs'
+ resources = self.get_resource('prepsnowobs')
+ task_name = f'{self.cdump}prepsnowobs'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
'envars': self.envars,
'cycledef': self.cdump.replace('enkf', ''),
- 'command': f'{self.HOMEgfs}/jobs/rocoto/preplandobs.sh',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowobs.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
@@ -555,21 +555,21 @@ def preplandobs(self):
return task
- def landanl(self):
+ def snowanl(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'{self.cdump}preplandobs'}
+ dep_dict = {'type': 'task', 'name': f'{self.cdump}prepsnowobs'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
- resources = self.get_resource('landanl')
- task_name = f'{self.cdump}landanl'
+ resources = self.get_resource('snowanl')
+ task_name = f'{self.cdump}snowanl'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
'envars': self.envars,
'cycledef': self.cdump.replace('enkf', ''),
- 'command': f'{self.HOMEgfs}/jobs/rocoto/landanl.sh',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/snowanl.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
@@ -583,7 +583,7 @@ def prepoceanobs(self):
ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})
deps = []
- data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc'
+ data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc'
dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -826,8 +826,8 @@ def _fcst_cycled(self):
dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'}
dependencies.append(rocoto.add_dependency(dep_dict))
- if self.app_config.do_jedilandda:
- dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'}
+ if self.app_config.do_jedisnowda:
+ dep_dict = {'type': 'task', 'name': f'{self.cdump}snowanl'}
dependencies.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
@@ -927,9 +927,9 @@ def atmanlprod(self):
return task
@staticmethod
- def _get_ufs_postproc_grps(cdump, config):
+ def _get_ufs_postproc_grps(cdump, config, component='atmos'):
- fhrs = Tasks._get_forecast_hours(cdump, config)
+ fhrs = Tasks._get_forecast_hours(cdump, config, component=component)
nfhrs_per_grp = config.get('NFHRS_PER_GROUP', 1)
ngrps = len(fhrs) // nfhrs_per_grp if len(fhrs) % nfhrs_per_grp == 0 else len(fhrs) // nfhrs_per_grp + 1
@@ -1002,83 +1002,63 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"):
return task
- def atmprod(self):
+ def atmos_prod(self):
+ return self._atmosoceaniceprod('atmos')
- varname1, varname2, varname3 = 'grp', 'dep', 'lst'
- varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['atmos_products'])
- var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
-
- postenvars = self.envars.copy()
- postenvar_dict = {'FHRLST': '#lst#'}
- for key, value in postenvar_dict.items():
- postenvars.append(rocoto.create_envar(name=key, value=str(value)))
-
- atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"])
- deps = []
- data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2#dep#'
- dep_dict = {'type': 'data', 'data': data, 'age': 120}
- deps.append(rocoto.add_dependency(dep_dict))
- dependencies = rocoto.create_dependency(dep=deps)
- cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
- resources = self.get_resource('atmos_products')
-
- task_name = f'{self.cdump}atmprod#{varname1}#'
- task_dict = {'task_name': task_name,
- 'resources': resources,
- 'dependency': dependencies,
- 'envars': postenvars,
- 'cycledef': cycledef,
- 'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
- 'job_name': f'{self.pslot}_{task_name}_@H',
- 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
- 'maxtries': '&MAXTRIES;'
- }
+ def ocean_prod(self):
+ return self._atmosoceaniceprod('ocean')
- metatask_dict = {'task_name': f'{self.cdump}atmprod',
- 'task_dict': task_dict,
- 'var_dict': var_dict
- }
+ def ice_prod(self):
+ return self._atmosoceaniceprod('ice')
- task = rocoto.create_task(metatask_dict)
+ def _atmosoceaniceprod(self, component: str):
- return task
+ products_dict = {'atmos': {'config': 'atmos_products',
+ 'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2#dep#'},
+ 'ocean': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.#dep#.nc'},
+ 'ice': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.#dep#.nc'}}
- def ocnpost(self):
+ component_dict = products_dict[component]
+ config = component_dict['config']
+ history_path_tmpl = component_dict['history_path_tmpl']
+ history_file_tmpl = component_dict['history_file_tmpl']
varname1, varname2, varname3 = 'grp', 'dep', 'lst'
- varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['ocnpost'])
+ varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs[config], component=component)
var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
postenvars = self.envars.copy()
- postenvar_dict = {'FHRLST': '#lst#',
- 'ROTDIR': self.rotdir}
+ postenvar_dict = {'FHRLST': '#lst#', 'COMPONENT': component}
for key, value in postenvar_dict.items():
postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+ history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl])
deps = []
- atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"])
- data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt'
- dep_dict = {'type': 'data', 'data': data}
- deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+ data = f'{history_path}/{history_file_tmpl}'
+ dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
- dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+ dependencies = rocoto.create_dependency(dep=deps)
cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
- resources = self.get_resource('ocnpost')
+ resources = self.get_resource(component_dict['config'])
- task_name = f'{self.cdump}ocnpost#{varname1}#'
+ task_name = f'{self.cdump}{component}_prod#{varname1}#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
'envars': postenvars,
'cycledef': cycledef,
- 'command': f'{self.HOMEgfs}/jobs/rocoto/ocnpost.sh',
+ 'command': f"{self.HOMEgfs}/jobs/rocoto/{config}.sh",
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
}
- metatask_dict = {'task_name': f'{self.cdump}ocnpost',
+ metatask_dict = {'task_name': f'{self.cdump}{component}_prod',
'task_dict': task_dict,
'var_dict': var_dict
}
@@ -1357,7 +1337,7 @@ def _get_awipsgroups(cdump, config):
def awips_20km_1p0deg(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1398,7 +1378,7 @@ def awips_20km_1p0deg(self):
def awips_g2(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1439,7 +1419,7 @@ def awips_g2(self):
def gempak(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1462,7 +1442,7 @@ def gempak(self):
def gempakmeta(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1485,7 +1465,7 @@ def gempakmeta(self):
def gempakmetancdc(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1508,7 +1488,7 @@ def gempakmetancdc(self):
def gempakncdcupapgif(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1647,7 +1627,7 @@ def vminmon(self):
def tracker(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1670,7 +1650,7 @@ def tracker(self):
def genesis(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1693,7 +1673,7 @@ def genesis(self):
def genesis_fsu(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1716,7 +1696,7 @@ def genesis_fsu(self):
def fit2obs(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1781,7 +1761,7 @@ def metp(self):
def mos_stn_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1804,7 +1784,7 @@ def mos_stn_prep(self):
def mos_grd_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1827,7 +1807,7 @@ def mos_grd_prep(self):
def mos_ext_stn_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1850,7 +1830,7 @@ def mos_ext_stn_prep(self):
def mos_ext_grd_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -2168,7 +2148,7 @@ def arch(self):
dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis_fsu'}
deps.append(rocoto.add_dependency(dep_dict))
# Post job dependencies
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
@@ -2179,8 +2159,12 @@ def arch(self):
dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_ocean:
- if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'}
+ if self.cdump in ['gfs']:
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocean_prod'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ if self.app_config.do_ice:
+ if self.cdump in ['gfs']:
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ice_prod'}
deps.append(rocoto.add_dependency(dep_dict))
# MOS job dependencies
if self.cdump in ['gfs'] and self.app_config.do_mos:
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 110dc286b5..ac70c6e4fc 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -20,10 +20,10 @@ class Tasks:
'eobs', 'eomg', 'epos', 'esfc', 'eupd',
'atmensanlinit', 'atmensanlrun', 'atmensanlfinal',
'aeroanlinit', 'aeroanlrun', 'aeroanlfinal',
- 'preplandobs', 'landanl',
+ 'prepsnowobs', 'snowanl',
'fcst',
- 'atmanlupp', 'atmanlprod', 'atmupp', 'atmprod', 'goesupp',
- 'ocnpost',
+ 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp',
+ 'atmosprod', 'oceanprod', 'iceprod',
'verfozn', 'verfrad', 'vminmon',
'metp',
'tracker', 'genesis', 'genesis_fsu',
@@ -120,23 +120,37 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
rocoto_conversion_dict.get)
@staticmethod
- def _get_forecast_hours(cdump, config) -> List[str]:
- fhmin = config['FHMIN']
- fhmax = config['FHMAX']
- fhout = config['FHOUT']
+ def _get_forecast_hours(cdump, config, component='atmos') -> List[str]:
+ # Make a local copy of the config to avoid modifying the original
+ local_config = config.copy()
+
+ # Ocean/Ice components do not have a HF output option like the atmosphere
+ if component in ['ocean', 'ice']:
+ local_config['FHMAX_HF_GFS'] = config['FHMAX_GFS']
+ local_config['FHOUT_HF_GFS'] = config['FHOUT_OCNICE_GFS']
+ local_config['FHOUT_GFS'] = config['FHOUT_OCNICE_GFS']
+ local_config['FHOUT'] = config['FHOUT_OCNICE']
+
+ fhmin = local_config['FHMIN']
# Get a list of all forecast hours
fhrs = []
if cdump in ['gdas']:
- fhrs = range(fhmin, fhmax + fhout, fhout)
+ fhmax = local_config['FHMAX']
+ fhout = local_config['FHOUT']
+ fhrs = list(range(fhmin, fhmax + fhout, fhout))
elif cdump in ['gfs', 'gefs']:
- fhmax = config['FHMAX_GFS']
- fhout = config['FHOUT_GFS']
- fhmax_hf = config['FHMAX_HF_GFS']
- fhout_hf = config['FHOUT_HF_GFS']
+ fhmax = local_config['FHMAX_GFS']
+ fhout = local_config['FHOUT_GFS']
+ fhmax_hf = local_config['FHMAX_HF_GFS']
+ fhout_hf = local_config['FHOUT_HF_GFS']
fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf)
fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout))
+ # ocean/ice components do not have fhr 0 as they are averaged output
+ if component in ['ocean', 'ice']:
+ fhrs.remove(0)
+
return fhrs
def get_resource(self, task_name):