Skip to content
Snippets Groups Projects
Commit d12499ec authored by Michael Blaschek's avatar Michael Blaschek :bicyclist:
Browse files

wrf update

parent 53eaae40
No related branches found
No related tags found
No related merge requests found
Showing
with 1259 additions and 203 deletions
# Singularity definition file: Miniconda3 (py39, installer 4.9.2, x86_64)
# on top of the IMGW Ubuntu 18.04 base image, with ucx/openmpi/mpi4py
# added from conda-forge.
Bootstrap: library
From: mblaschek/imgw/ubuntu:18.04
%labels
APPLICATION_NAME miniconda3
APPLICATION_VERSION py39-4.9.2-Linux-x86_64
APPLICATION_URL https://docs.conda.io
AUTHOR_NAME Michael Blaschek
AUTHOR_EMAIL michael.blaschek@univie.ac.at
LAST_UPDATED 20211118
%setup
%environment
# Set the conda distribution type, its version number, the python
# version it utilizes, the root and installation directories where
# the distribution will be installed within the container, and the
# root URL to the installer
export CONDA_DISTRIBUTION='miniconda'
export CONDA_VERSION='3'
export CONDA_PYTHON_VERSION='py39'
export CONDA_INSTALLER_VERSION='4.9.2'
export CONDA_ARCH='Linux-x86_64'
export CONDA_INSTALL_DIR="/opt/${CONDA_DISTRIBUTION}${CONDA_VERSION}"
# Set PATH to conda distribution
export PATH="${CONDA_INSTALL_DIR}/bin:${PATH}"
# %post runs with bash explicitly: the installer-name expansion below
# uses the bash-only ${var^} capitalization operator.
%post -c /bin/bash
# Set operating system mirror URL
export MIRRORURL='http://at.archive.ubuntu.com/ubuntu'
# Set operating system version
export OSVERSION='bionic'
# Set system locale
export LC_ALL='C'
# Set debian frontend interface
export DEBIAN_FRONTEND='noninteractive'
# Upgrade all software packages to their latest versions
apt-get -y update && apt-get -y upgrade
cd /tmp
# Set the conda distribution type, its version number, the python
# version it utilizes, the root and installation directories where
# the distribution will be installed within the container, and the
# root URL to the installer
export CONDA_DISTRIBUTION='miniconda'
export CONDA_VERSION='3'
export CONDA_PYTHON_VERSION='py39'
export CONDA_INSTALLER_VERSION='4.9.2'
export CONDA_ARCH='Linux-x86_64'
# ${CONDA_DISTRIBUTION^} capitalizes the first letter -> "Miniconda3-..."
export CONDA_INSTALLER="${CONDA_DISTRIBUTION^}${CONDA_VERSION}-${CONDA_PYTHON_VERSION}_${CONDA_INSTALLER_VERSION}-${CONDA_ARCH}.sh"
export CONDA_INSTALL_DIR="/opt/${CONDA_DISTRIBUTION}${CONDA_VERSION}"
export CONDA_ROOT_URL='https://repo.anaconda.com'
# Download and install conda distribution
wget "${CONDA_ROOT_URL}/${CONDA_DISTRIBUTION}/${CONDA_INSTALLER}"
chmod +x "${CONDA_INSTALLER}"
# -b: batch (non-interactive), -p: install prefix
"./${CONDA_INSTALLER}" -b -p "${CONDA_INSTALL_DIR}"
# Remove conda installer
rm "${CONDA_INSTALLER}"
# Add MPI Package from conda-forge
# ucx
# openmpi
$CONDA_INSTALL_DIR/bin/conda install -y -c conda-forge ucx openmpi mpi4py
# Cleanup
apt-get -y autoremove --purge
apt-get -y clean
# Update database for mlocate
updatedb
%files
%runscript
%test
# Singularity definition file: micromamba base image with ucx/openmpi/mpi4py
# from conda-forge, plus an "mpitest" app that compiles and runs a small
# MPI hello-world (mpitest.c) to verify the MPI stack.
Bootstrap: docker
From: mambaorg/micromamba:latest
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
$PWD/definition-files/runscript /.singularity.d/runscript
$PWD/definition-files/run-help /.singularity.d/runscript.help
%apprun mpitest
# Run the compiled MPI test with $1 processes (default 4).
nproc=4
if [ $# -eq 1 ]; then
nproc=$1
fi
echo "Running MPITest : $nproc"
exec mpirun -np $nproc mpitest.x
%appfiles mpitest
./definition-files/MPI/mpitest.c ./mpitest.c
%appinstall mpitest
# Compile the test program against the conda-provided MPI.
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
mpicc mpitest.c -o mpitest.x
%environment
export LANG=C.UTF-8
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
%post
micromamba -q install -y -n base -c conda-forge \
python=3.9 \
ucx \
openmpi \
mpi4py \
&& micromamba clean --all --yes
# command prompt name
CNAME=m.ompi
# does not work goes into /.singularity.d/env/91-environment.sh
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# add some labels
echo "libc $(ldd --version | head -n1 | cut -d' ' -f4)" >> "$SINGULARITY_LABELS"
echo "linux $(cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2)" >> "$SINGULARITY_LABELS"
%test
mpicc --version
Bootstrap: docker Bootstrap: docker
From: texlive:latest From: texlive/texlive:latest
%labels %labels
maintainer IT-IMGW <it.img-wien@univie.ac.at> maintainer IT-IMGW <it.img-wien@univie.ac.at>
......
...@@ -11,6 +11,7 @@ apt install -y \ ...@@ -11,6 +11,7 @@ apt install -y \
python3 \ python3 \
gcc-6 \ gcc-6 \
gfortran-6 \ gfortran-6 \
libopenmpi-dev \
libmpich-dev \ libmpich-dev \
libhdf5-dev \ libhdf5-dev \
libnetcdf-dev \ libnetcdf-dev \
......
# WRF in a container
:construction:
This is still experimental. No guarantees.
Please find the following containers available for download:
- WRF
- GSI
- WRFpy
- WRF.dev
## Design
Build a container with all requirements installed and make it easily available to users for further development. The build proceeds in stages:
1. WRF sandbox / requirements
2. Build container
3. Final container
Build some special containers that are most frequently requested, e.g. em_real and em_les.
## Example: Hurricane Sandy
## Performance tests
We ran the same container:
- VSC
- JET
- SRVX1
Multi-Node?
# Development
If you like to build your own WRF container with customized source code or a different setting/purpose, then you can find here some guidance on how to do this.
Steps:
1. Pull the WRF development container from
2. Pull the source code of WRF / WPS ...
3. Configure the Source code to use the libraries inside the container
4. Test your settings / compilation
5. Build your own container from the WRF development container and your SRC Code.
# Singularity definition file: WRF development image on AlmaLinux 8.
# Installs compilers, OpenMPI and the HDF5/NetCDF stack needed to build
# WRF/WPS, and exports the environment variables their configure expects.
Bootstrap: localimage
From: ../../containers/alma8.base.sif
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
baseimage AlmaLinux8
%post
# Every line will be a layer in the container
# See https://fedoraproject.org/wiki/EPEL#Quickstart for powertools
# yum --enablerepo epel groupinstall -y "Development Tools" \
yum update -y \
&& yum install -y dnf-plugins-core \
&& dnf config-manager --set-enabled powertools \
&& yum install -y epel-release \
&& yum update -y \
&& yum --enablerepo epel install -y \
curl wget \
file \
findutils \
gcc-c++ \
gcc \
gcc-gfortran \
glibc.i686 libgcc.i686 \
libpng-devel jasper-libs jasper-devel \
m4 make perl cmake \
flex flex-devel bison bison-devel \
libcurl-devel \
libxml2 libxml2-devel perl-XML-LibXML ImageMagick \
python3 python3-pip python3-devel \
tar bash tcsh time which zlib zlib-devel \
git scl \
gnupg2 \
hostname \
iproute \
patch \
openmpi-devel \
openmpi \
hdf5-openmpi-devel \
hdf5-openmpi-static \
netcdf-openmpi-devel \
netcdf-openmpi-static \
netcdf-fortran-openmpi-devel \
netcdf-fortran-openmpi-static \
openblas-devel.x86_64 \
openblas-openmp.x86_64 \
&& rm -rf /var/cache/yum \
&& yum clean all \
&& dnf clean all \
&& rm -rf /usr/share/doc \
&& rm -rf /usr/share/man \
&& ln -s /usr/include/openmpi-x86_64/ /usr/lib64/openmpi/include
# command prompt name
CNAME=wrf.dev
# does not work goes into /.singularity.d/env/91-environment.sh
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# not sure why that does not happen as default
echo "export PKG_CONFIG_PATH=/usr/lib64/openmpi/lib/pkgconfig/" >> $SINGULARITY_ENVIRONMENT
%environment
# Paths WRF/WPS configure scripts read (NETCDF, HDF5_ROOT, JASPER*).
export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export PATH=/usr/lib64/openmpi/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export LIBRARY=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules/openmpi:/usr/include
export NETCDF=/usr/lib64/openmpi
export NETCDF_ROOT=/usr/lib64/openmpi
export HDF5_ROOT=/usr/lib64/openmpi
export JASPERINC=/usr/include/jasper/
export JASPERLIB=/usr/lib64/
# Singularity definition file: build GSI/EnKF from the wrf.dev sandbox image.
# First stage of a two-stage build; executables are later copied into the
# final GSI container.
Bootstrap: localimage
From: sandbox.wrf.dev
Stage: build
# Two Stage Build of GSI
# use the sandbox development image
# 1. Compile the target
# 2. Copy relevant exe to target GSI container
# Make sure the ldd / linking and inputs are ok
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
./scripts/runscript /.singularity.d/runscript
./scripts/run_gsi.ksh /wrf/bin
%apprun init
echo "Please modify run_gsi.ksh to your needs and settings or use set_env.ksh"
cp -uv /wrf/bin/run_gsi.ksh .
%apphelp init
Use this app to copy the run_gsi.ksh to your current directory.
This is a modified version from dtcenter container version.
Please update either the run_gsi.ksh or the set_env.ksh in your local directory.
%post
# GSI requires cmake and openblas + lapack
yum -y install cmake openblas-devel.x86_64 openblas-openmp.x86_64
# Build a WRF release from the sandbox
WRF_BUILD_TARGET=em_real
LDFLAGS="-lm"
# NOTE(review): NETCDF/HDF5_ROOT are not exported here, so cmake may not
# see them as environment variables -- confirm whether `export` is needed.
NETCDF=/usr/lib64/openmpi/
HDF5_ROOT=$NETCDF
export GSI_VERSION=3.7
export ENKF_VERSION=1.3
# Download and unpack the community GSI/EnKF source tree
mkdir -p /wrf/gsi \
&& curl -SL https://dtcenter.org/sites/default/files/comGSIv${GSI_VERSION}_EnKFv${ENKF_VERSION}.tar.gz | tar -xzC /wrf/gsi
#
# Make the gfortran MPI module files visible in the include path
#
ln -s /usr/lib64/gfortran/modules/openmpi/* /usr/lib64/openmpi/include/
#
# prep GSI build
#
mkdir -p /wrf/gsi/build \
&& cd /wrf/gsi/build \
&& cmake /wrf/gsi/comGSIv${GSI_VERSION}_EnKFv${ENKF_VERSION}
#
# Fix a few GSI bugs
# (fixed: the leading Docker `RUN` keyword was left over from a Dockerfile
# and would have been executed as a nonexistent command in %post)
#
umask 0002 \
&& sed -i 's/wij(1)/wij/g' /wrf/gsi/comGSIv3.7_EnKFv1.3/src/setuplight.f90 \
&& sed -i "s,\$, -L$NETCDF,g" /wrf/gsi/build/src/CMakeFiles/gsi.x.dir/link.txt \
&& sed -i "s,\$, -L$NETCDF,g" /wrf/gsi/build/src/enkf/CMakeFiles/enkf_wrf.x.dir/link.txt
#
# Build GSI
#
cd /wrf/gsi/build \
&& make -j 4
#
# Fix all wired paths
#
# sed -i 's,/comsoftware/gsi/,/wrf/gsi/' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/gsi_build,/build' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/data,./data,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/scripts/case,.,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/wrfprd,.,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/gsiprd,.,g' /wrf/bin/run_gsi.ksh
#
# Wrap it up
#
echo "export GSI_VERSION=$GSI_VERSION" >> $SINGULARITY_ENVIRONMENT
echo "export ENKF_VERSION=$ENKF_VERSION" >> $SINGULARITY_ENVIRONMENT
CNAME=alma8.gsi
# does not work goes into /.singularity.d/env/91-environment.sh
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
%environment
export PATH=/wrf/gsi/build/bin:/wrf/bin:$PATH
\ No newline at end of file
...@@ -72,7 +72,7 @@ echo "export PKG_CONFIG_PATH=/usr/lib64/openmpi/lib/pkgconfig/" >> $SINGULARITY_ ...@@ -72,7 +72,7 @@ echo "export PKG_CONFIG_PATH=/usr/lib64/openmpi/lib/pkgconfig/" >> $SINGULARITY_
export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export PATH=/usr/lib64/openmpi/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin export PATH=/usr/lib64/openmpi/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export LIBRARY=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib export LIBRARY=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules:/usr/include export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules/openmpi:/usr/include
export NETCDF=/usr/lib64/openmpi export NETCDF=/usr/lib64/openmpi
export NETCDF_ROOT=/usr/lib64/openmpi export NETCDF_ROOT=/usr/lib64/openmpi
export JASPERINC=/usr/include/jasper/ export JASPERINC=/usr/include/jasper/
......
...@@ -48,8 +48,14 @@ Stage: final ...@@ -48,8 +48,14 @@ Stage: final
# Copy only executables from WRF to directory # Copy only executables from WRF to directory
/wrf/bin /wrf/bin /wrf/bin /wrf/bin
/wrf/run /wrf/run /wrf/run /wrf/run
# WPS
/wrf/WPS/link_grib.csh /wrf/bin /wrf/WPS/link_grib.csh /wrf/bin
/wrf/WPS/ungrib/Variable_Tables /wrf/run /wrf/WPS/ungrib/Variable_Tables /wrf/run
/wrf/WPS/geogrid/GEOGRID* /wrf/run/geogrid/
/wrf/WPS/geogrid/gribmap.txt /wrf/run/geogrid/
/wrf/WPS/metgrid/METGRID* /wrf/run/metgrid/
/wrf/WPS/metgrid/gribmap.txt /wrf/run/metgrid/
# Log files
/wrf/WRF/compile_wrf_arw_opt34.1.log /wrf /wrf/WRF/compile_wrf_arw_opt34.1.log /wrf
/wrf/WPS/compile_wps.log /wrf /wrf/WPS/compile_wps.log /wrf
......
# Singularity definition file: Python post-processing container for WRF
# (pygrib/cartopy/netCDF4) with the DTC NWP plotting scripts in /opt/wrf.
Bootstrap: docker
From: mambaorg/micromamba:latest
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
../../definition-files/runscript /.singularity.d/runscript
../../definition-files/run-help /.singularity.d/runscript.help
%post
# install curl
# NOTE(review): apt needs root; assumes %post runs as root during build.
apt -y update && apt -y install curl && apt -y clean
# install python packages
micromamba -q install -y -n base -c conda-forge \
pygrib=2.1.4 \
cartopy=0.21 \
netCDF4=1.5.8 \
pyyaml=6.0
# include WRF/WPS scripts
mkdir -p /opt/wrf && cd /opt/wrf
# --strip-components=5 drops container-dtc-nwp-4.1.0/components/scripts/common/python/
# so the .py files land directly in /opt/wrf
curl -sL https://github.com/NCAR/container-dtc-nwp/archive/refs/tags/v4.1.0.tar.gz | tar --strip-components=5 -zxC . container-dtc-nwp-4.1.0/components/scripts/common/python/
# fix wired paths
sed -i 's,/home/pythonprd,./,g' *.py
sed -i 's,/home/postprd/,./,g' *.py
sed -i 's,/home/scripts/case/,./,g' *.py
sed -i 's,cartopy.config,#cartopy.config,g' *.py
# final
CNAME=wrfpy
# does not work goes into /.singularity.d/env/91-environment.sh
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# add some labels
echo "libc $(ldd --version | head -n1 | cut -d' ' -f4)" >> "$SINGULARITY_LABELS"
echo "linux $(cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2)" >> "$SINGULARITY_LABELS"
%environment
export MPLCONFIGDIR=.
export LANG=C.UTF-8
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
export PYTHONDONTWRITEBYTECODE=1
export PYTHONPATH=/opt/wrf:$PYTHONPATH
models/WRF/dtc_nwp_flow_3.png

93.2 KiB

#!/bin/bash
# By MB
# Based on the Example from DTCenter, Boulder NCAR.
# https://dtcenter.org/nwp-containers-online-tutorial
# https://dtcenter.org/nwp-containers-online-tutorial/hurricane-sandy-case-27-oct-2012
# ANSI color codes used by the report/warning status helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Wait for a single keypress, but only when $DEBUG expands to a command
# that succeeds (e.g. DEBUG=true); otherwise return immediately.
pause() {
  $DEBUG || return 0
  read -n1 -rsp $'Press any key to continue or Ctrl+C to exit...\n'
}
# Print the message(s) padded to 68 columns followed by a yellow,
# right-aligned "SKIPPED" tag. Always returns 0.
warning() {
  local tag='SKIPPED'
  printf "%-68s[${YELLOW}%10s${NC}]\n" "$@" "$tag"
  return 0
}
# Report the exit status of the previous command: prints the message padded
# to 68 columns with a green "OK" tag (status 0) or a red "FAILED" tag.
# Returns 0 on OK and 1 on FAILED so it can be chained with `|| exit 1`.
report() {
  local rv=$?
  if [ "$rv" -eq 0 ]; then
    printf "%-68s[${GREEN}%10s${NC}]\n" "$@" "OK"
    return 0
  fi
  printf "%-68s[${RED}%10s${NC}]\n" "$@" "FAILED"
  return 1
}
# Ask a yes/no question on stdin. Returns 0 when the answer starts with
# y or Y; otherwise prints a SKIPPED warning and returns 1.
question() {
  local answer
  read -p "$@ (y/n)" answer
  if [[ $answer == [Yy]* ]]; then
    return 0
  fi
  warning "skipping"
  return 1
}
# Case identifier used as a prefix in all status messages.
INAME="SANDY"
# Directory this script lives in; the containers are expected next to it.
CPATH=$(dirname $0)
case_name=sandy
WRFCONTAINER=$CPATH/WRF-4.4.1-em_real.sif
PYCONTAINER=$CPATH/WRFpy.sif
GISCONTAINER=$CPATH/GSI-3.7.sif
# Number of MPI processes; > 1 requires a host-side mpirun.
NPROC=1
if [ $NPROC -gt 1 ]; then
warning "[$INAME] Trying to run with n=$NPROC processes."
echo "[$INAME] Make sure mpirun can be accessed!"
command -v mpirun
report "[$INAME] mpirun command works" || exit 1
# Check if Versions are similar ?
# MPI Version outside needs to be newer!
fi
# Check if we have the container?
# test -f Makefile
# report "[$INAME] Makefile found"
# if [ $? -eq 0 ]; then
# echo "[$INAME] Building WRF ..."
# make em_real
# fi
# Fetch the WRF em_real container: if it is not already present next to
# this script, pull it from the IMGW Sylabs library.
test -f "$WRFCONTAINER"
report "[$INAME] WRF em_real [$WRFCONTAINER]"
if [ $? -ne 0 ]; then
  # Try to pull (fixed: the library URI contained a stray double slash,
  # inconsistent with the manual URL printed above)
  echo "Manual: https://cloud.sylabs.io/library/mblaschek/models/wrf"
  singularity pull "$WRFCONTAINER" library://mblaschek/models/wrf-emreal:4.4.1
  report "[$INAME] WRF em_real pulled from library" || exit 1
fi
# The python and GSI containers must already exist; there is no pull fallback.
test -f "$PYCONTAINER"
report "[$INAME] WRF python [$PYCONTAINER]" || exit 1
test -f "$GISCONTAINER"
report "[$INAME] GIS [$GISCONTAINER]" || exit 1
# Ask where to place all case files; default to the current directory.
PROJ_PATH=
read -p "[$INAME] Where do you want to place all files? [$PWD] :" PROJ_PATH
if [ -z "$PROJ_PATH" ]; then
PROJ_PATH=.
fi
INPUT_DIR=$PROJ_PATH/data
mkdir -vp $PROJ_PATH/sandy/data/
report "[$INAME] using $PROJ_PATH"
cd $PROJ_PATH/sandy/data/
# Each dataset below is downloaded only when its target directory is missing.
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-derechodata_20120629.tar.gz | tar zxC .
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-snowdata_20160123.tar.gz | tar zxC .
test -d model_data
report "[$INAME] Sandy input data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-sandydata_20121027.tar.gz | tar zxC .
report "[$INAME] using Sandy input data" || exit 1
fi
test -d obs_data
report "[$INAME] Sandy obs data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/CRTM_v2.3.0.tar.gz | tar zxC .
report "[$INAME] using Sandy obs data" || exit 1
fi
test -d WPS_GEOG
report "[$INAME] Sandy geogrid data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/wps_geog.tar.gz | tar zxC .
report "[$INAME] using Sandy geogrid data" || exit 1
fi
test -d shapefiles
report "[$INAME] Sandy natural earth data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/shapefiles.tar.gz | tar zxC .
report "[$INAME] using Sandy natural earth data" || exit 1
fi
# return back
cd $PROJ_PATH/sandy
#
# dtcenter
# https://github.com/NCAR/container-dtc-nwp
#
#
# Setup Case files for SANDY
#
# Get sandy case files (only the sandy_20121027 scripts subtree is extracted)
curl -sL https://github.com/NCAR/container-dtc-nwp/archive/refs/tags/v4.1.0.tar.gz | tar --strip-components=4 -zxC . container-dtc-nwp-4.1.0/components/scripts/sandy_20121027
# fix paths in the case configuration so it runs from the current directory
sed -i 's,/data/,./data/,g' set_env.ksh
sed -i 's,/data/,./data/,g' namelist.wps
# Fixed: this s-command was unterminated (`s,A,./g`); the replacement `.`
# and the `g` flag need a closing delimiter, otherwise sed aborts.
sed -i 's,/comsoftware/wrf/WPS-4.3,.,g' namelist.wps
# Optional run plot
# needs cartopy and python
# /home/scripts/common/run_python_domain.ksh
#
# Get default em_real configuration files from /wrf/run
# WRF/run and WPS/geogrid. WPS/metgrid, WPS/ungrib
#
echo "[$INAME] CP em_real files to current directory [no overwriting]"
# Fixed: `./$WRFCONTAINER` mangles the path when CPATH is absolute; the
# container path always contains a slash, so it can be executed directly.
"$WRFCONTAINER" init
report "[$INAME] WRF init complete" || exit 1
# Check if Variable Tables are present / depends on input files
source set_env.ksh
# input_data must be sourced
# NOTE(review): Vtables are normally plain files, not directories --
# confirm whether this should be `test -f`.
test -d Vtable.${input_data}
# Fixed: the original `{echo ...}` was a malformed brace group (missing
# space after `{` and `;` before `}`) and could never run as intended.
report "[$INAME] Variable Table Vtable.${input_data} found" || { echo "Please provide Variable Table!"; exit 1; }
# Link Grib files to here
link_grib.csh $INPUT_DIR/model_data/${case_name}/*
report "[$INAME] Model input data from: $INPUT_DIR/model_data/${case_name}/"
#
# geogrid data
#
# Remove any stale geogrid output. Unconditional `rm -f` replaces the
# broken `[ -e glob ]` test, which errors out ("too many arguments") as
# soon as the glob matches more than one file.
rm -f geo_em.d*.nc
# Command for geogrid (fixed log message typo: "geogrib")
echo "[$INAME] Running geogrid ..."
singularity exec $WRFCONTAINER geogrid.exe >run_geogrid.log 2>&1
test -f geo_em.d01.nc
report "[$INAME] WPS geogrid ready." || exit 1
#
# ungrib data
#
# First start date (e.g. 2012-10-27_00) parsed from namelist.wps; head -n1
# guards against multiple start_date lines producing a multi-line value.
file_date=$(grep -i start_date namelist.wps | head -n1 | cut -d"'" -f2 | cut -d":" -f1)
# Remove stale ungrib output unconditionally; `rm -rf` with an unmatched
# glob is harmless, so the previous existence tests are unnecessary and
# left old-date files behind.
rm -rf PFILE* FILE*
# Command for ungrib
echo "[$INAME] Running ungrib ..."
singularity exec $WRFCONTAINER ungrib.exe >run_ungrib.log 2>&1
ls -ls FILE:*
report "[$INAME] WPS ungrib ready." || exit 1
#
# metgrid data
#
# Remove stale metgrid output. The original `[ -e met_em.d*... ]` test
# breaks as soon as the glob matches more than one domain file.
rm -rf met_em.d*
# Command for metgrid
echo "[$INAME] Running metgrid ..."
singularity exec $WRFCONTAINER metgrid.exe >run_metgrid.log 2>&1
ls -ls met_em.d01.*
report "[$INAME] WPS metgrid ready." || exit 1
echo "[$INAME] WPS setup complete. Ready for WRF"
#
# WRF: prepare namelist and run real.exe
#
# Strip the 'nocolons' option from namelist.input, keeping a backup copy.
sed -e '/nocolons/d' namelist.input >nml
cp namelist.input namelist.nocolons
mv nml namelist.input
# Remove stale real.exe output. Unconditional removal replaces the broken
# `[ -e wrfinput_d* ]` glob test (errors with more than one match).
rm -rf wrfi* wrfb*
# Command for real
echo "[$INAME] Running real ..."
if [ $NPROC -gt 1 ]; then
  # MPI run. NOTE(review): assumes an environment-modules setup providing
  # openmpi on the host -- confirm `module` is available here.
  module load openmpi/
  mpirun -np $NPROC singularity exec $WRFCONTAINER real.exe >run_real.log 2>&1
else
  # Serial run
  singularity exec $WRFCONTAINER real.exe >run_real.log 2>&1
fi
test -f wrfinput_d01 && test -f wrfbdy_d01
report "[$INAME] WRF run finished"
#
# GSI Data Assimilation
#
# NOTE(review): this whole section is disabled (`if false`); enable by
# changing the guard once GSI input data is prepared.
if false; then
echo "[$INAME] Running gsi ..."
singularity run --app init $GISCONTAINER
test -f run_gsi.ksh
report "[$INAME] GSI run script ready" || exit 1
# add the correct run Command
if [ $NPROC -gt 1 ]; then
echo "export RUN_COMMAND=\"mpirun -np $NPROC singularity exec $GISCONTAINER \$GIS_EXE\"" >>set_env.ksh
else
echo "export RUN_COMMAND=\"singularity exec $GISCONTAINER \$GIS_EXE\"" >>set_env.ksh
fi
# Execute GSI
./run_gsi.ksh
fi
#
# run WRF
#
# If wrfinput_d01.orig exists, rename it to wrfinput_d01 to reset the state
if [[ -e wrfinput_d01.orig ]]; then
mv wrfinput_d01.orig wrfinput_d01
fi
# If GSI was run, update the wrfinput file
if [[ -e ./wrf_inout ]]; then
mv wrfinput_d01 wrfinput_d01.orig
cp ./wrf_inout wrfinput_d01
fi
# Run wrf.exe with MPI when NPROC > 1, serially otherwise.
if [ $NPROC -gt 1 ]; then
mpirun -np $NPROC singularity exec $WRFCONTAINER wrf.exe >run_wrf.log 2>&1
else
singularity exec $WRFCONTAINER wrf.exe >run_wrf.log 2>&1
fi
#!/bin/bash
# By MB
# Based on the Example from DTCenter, Boulder NCAR.
# https://dtcenter.org/nwp-containers-online-tutorial
# https://dtcenter.org/nwp-containers-online-tutorial/hurricane-sandy-case-27-oct-2012
# Directory this script lives in.
CPATH=$(dirname $0)
# ANSI color codes used by the report/warning status helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Wait for a single keypress, but only when $DEBUG expands to a command
# that succeeds (e.g. DEBUG=true); otherwise return immediately.
pause() {
  $DEBUG || return 0
  read -n1 -rsp $'Press any key to continue or Ctrl+C to exit...\n'
}
# Print the message(s) padded to 68 columns followed by a yellow,
# right-aligned "SKIPPED" tag. Always returns 0.
warning() {
  local tag='SKIPPED'
  printf "%-68s[${YELLOW}%10s${NC}]\n" "$@" "$tag"
  return 0
}
# Report the exit status of the previous command: prints the message padded
# to 68 columns with a green "OK" tag (status 0) or a red "FAILED" tag.
# Returns 0 on OK and 1 on FAILED so it can be chained with `|| exit 1`.
report() {
  local rv=$?
  if [ "$rv" -eq 0 ]; then
    printf "%-68s[${GREEN}%10s${NC}]\n" "$@" "OK"
    return 0
  fi
  printf "%-68s[${RED}%10s${NC}]\n" "$@" "FAILED"
  return 1
}
# Ask a yes/no question on stdin. Returns 0 when the answer starts with
# y or Y; otherwise prints a SKIPPED warning and returns 1.
question() {
  local answer
  read -p "$@ (y/n)" answer
  if [[ $answer == [Yy]* ]]; then
    return 0
  fi
  warning "skipping"
  return 1
}
# Case identifier used as a prefix in all status messages.
INAME="SANDY"
# Check if we have the container?
test -f Makefile
report "[$INAME] Makefile found"
if [ $? -eq 0 ]; then
echo "[$INAME] Building WRF ..."
make em_real
fi
test -f WRF-4.4.1-em_real.sif
report "[$INAME] WRF em_real container available" || exit 1
# Ask where to place all case files; default to the current directory.
PROJ_PATH=
read -p "[$INAME] Where do you want to place all files? [$PWD] :" PROJ_PATH
if [ -z "$PROJ_PATH" ]; then
PROJ_PATH=.
fi
mkdir -vp $PROJ_PATH/sandy/data/
report "[$INAME] using $PROJ_PATH"
cd $PROJ_PATH/sandy/data/
# Each dataset below is downloaded only when its target directory is missing.
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-derechodata_20120629.tar.gz | tar zxC .
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-snowdata_20160123.tar.gz | tar zxC .
test -d model_data
report "[$INAME] Sandy input data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-sandydata_20121027.tar.gz | tar zxC .
report "[$INAME] using Sandy input data" || exit 1
fi
test -d obs_data
report "[$INAME] Sandy obs data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/CRTM_v2.3.0.tar.gz | tar zxC .
report "[$INAME] using Sandy obs data" || exit 1
fi
test -d WPS_GEOG
report "[$INAME] Sandy geogrid data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/wps_geog.tar.gz | tar zxC .
report "[$INAME] using Sandy geogrid data" || exit 1
fi
# NOTE(review): the shapefiles archive is labelled "trajectory data" here
# but "natural earth data" in the sibling script -- confirm wording.
test -d shapefiles
report "[$INAME] Sandy trajectory data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/shapefiles.tar.gz | tar zxC .
report "[$INAME] using Sandy trajectory data" || exit 1
fi
exit 0
/home/scripts/common/run_python_domain.ksh
WPS_GEOG
/home/scripts/common/run_wps.ksh
/home/scripts/common/run_real.ksh
\ No newline at end of file
...@@ -16,16 +16,19 @@ bind special directories ...@@ -16,16 +16,19 @@ bind special directories
development using includes and libraries from inside the container development using includes and libraries from inside the container
> ./image.sif gfortran -I\$INCLUDE -L\$LIBRARY -o test.x test.f90 > ./image.sif gfortran -I\$INCLUDE -L\$LIBRARY -o test.x test.f90
WRF Simulations - Example WRF Simulations - Example
default run files are located in /wrf/run default run files are located in /wrf/run
executables are located in /wrf/bin executables are located in /wrf/bin
Default simulation of the container (still requires input) Default simulation of the container (still requires input), will do
1. copies /wrf/run files to the local directory 1. It will copy /wrf/run files to the local directory (.)
2. No local files will be overwritten (e.g. namelists ) 2. No local files will be overwritten (e.g. namelists), just missing files.
3. Execute real.exe 3. Execute real.exe, run:
> ./image.sif real.exe > ./image.sif real.exe
> ./image.sif wrf.exe
Using OpenMPI (requires an openmpi version on the host system)
> mpirun -np 8 ./image.sif real.exe
Manually copy run files to current directory Manually copy run files to current directory
> ./image.sif init > ./image.sif init
......
#!/bin/ksh
# Driver script to run GSI data assimilation inside the IMGW GSI container.
# Modified from the DTC community GSI run script.
#####################################################
# machine set up (users should change this part)
#####################################################
# Trace every command (verbose debugging output).
set -x
#
# Constants
INPUT_DIR="./data/gsi"
CASE_DIR="."
WRFPRD_DIR="."
#####################################################
# case set up (users should change this part)
#####################################################
#
# ANAL_TIME= analysis time (YYYYMMDDHH)
# WORK_ROOT= working directory, where GSI runs
# PREPBUFR = path of PreBUFR conventional obs
# BK_FILE = path and name of background file
# OBS_ROOT = path of observations files
# FIX_ROOT = path of fix files
# GSI_EXE = path and name of the gsi executable
# ENS_ROOT = path where ensemble background files exist
# NOTE(review): OBS_ROOT and ENS_ROOT are not set here; presumably they
# come from set_env.ksh below -- verify.
WORK_ROOT=.
BK_ROOT=${WRFPRD_DIR}
CRTM_ROOT=./data/gsi/CRTM_v2.3.0
GSI_ROOT=/wrf/gsi/comGSIv3.7_EnKFv1.3
GSI_BUILD=/wrf/gsi/build
FIX_ROOT=${GSI_ROOT}/fix/
GSI_EXE=${GSI_BUILD}/bin/gsi.x
RUN_COMMAND=$GSI_EXE
GSI_NAMELIST=${GSI_ROOT}/ush/comgsi_namelist.sh
#####################################################
#
# Include case-specific settings
#
#
# This can overwrite all above directory/settings
#
#####################################################
. $CASE_DIR/set_env.ksh
#####################################################
#
# Extract START_DATE
#
# Parse the analysis time (YYYYMMDDHH) out of the background file's
# :START_DATE global attribute via ncdump.
BK_FILE=${BK_ROOT}/wrfinput_d01
YYYY=$(ncdump -h ${BK_FILE} | grep ":START_DATE" | cut -f2 -d"=" | cut -c3-6)
MM=$(ncdump -h ${BK_FILE} | grep ":START_DATE" | cut -f2 -d"=" | cut -c8-9)
DD=$(ncdump -h ${BK_FILE} | grep ":START_DATE" | cut -f2 -d"=" | cut -c11-12)
HH=$(ncdump -h ${BK_FILE} | grep ":START_DATE" | cut -f2 -d"=" | cut -c14-15)
ANAL_TIME=${YYYY}${MM}${DD}${HH}
# using tm06 NDAS prepbufr here, so need have proper date for the observation file, which is ANAL_TIME + 6 hour
OBSTIME=$(${GSI_BUILD}/bin/ndate.x +6 ${ANAL_TIME})
HHH=$(echo $OBSTIME | cut -c9-10)
PREPBUFR=${PREPBUFR:-./data/obs_data/prepbufr/$OBSTIME/ndas.t${HHH}z.prepbufr.tm06.nr}
#
#------------------------------------------------
# bk_core= which WRF core is used as background (NMM or ARW or NMMB)
# bkcv_option= which background error covariance and parameter will be used
# (GLOBAL or NAM)
# if_clean = clean : delete temperal files in working directory (default)
# no : leave running directory as is (this is for debug only)
# if_observer = Yes : only used as observation operater for enkf
# if_hybrid = Yes : Run GSI as 3D/4D EnVar
# if_4DEnVar = Yes : Run GSI as 4D EnVar
# if_nemsio = Yes : The GFS background files are in NEMSIO format
# if_oneob = Yes : Do single observation test
if_hybrid=No # Yes, or, No -- case sensitive !
if_4DEnVar=No # Yes, or, No -- case sensitive (set if_hybrid=Yes first)!
if_observer=No # Yes, or, No -- case sensitive !
if_nemsio=No # Yes, or, No -- case sensitive !
if_oneob=No # Yes, or, No -- case sensitive !
bk_core=ARW
bkcv_option=NAM
if_clean=clean
# Check if background file exists in the right place and is non-zero size
if [[ ! -s $BK_FILE ]]; then
echo
echo ERROR: The background file $BK_FILE does not exist!
echo
exit 1
fi
#
# setup whether to do single obs test
# Translate the Yes/No flag into the Fortran namelist logical.
if [ ${if_oneob} = Yes ]; then
if_oneobtest='.true.'
else
if_oneobtest='.false.'
fi
#
# setup for GSI 3D/4D EnVar hybrid
# Derives the ensemble guess date (analysis time - 6 h) and the +/- 1 h
# background files needed for 4DEnVar.
if [ ${if_hybrid} = Yes ]; then
PDYa=$(echo $ANAL_TIME | cut -c1-8)
cyca=$(echo $ANAL_TIME | cut -c9-10)
gdate=$(date -u -d "$PDYa $cyca -6 hour" +%Y%m%d%H) #guess date is 6hr ago
gHH=$(echo $gdate | cut -c9-10)
datem1=$(date -u -d "$PDYa $cyca -1 hour" +%Y-%m-%d_%H:%M:%S) #1hr ago
datep1=$(date -u -d "$PDYa $cyca 1 hour" +%Y-%m-%d_%H:%M:%S) #1hr later
if [ ${if_nemsio} = Yes ]; then
if_gfs_nemsio='.true.'
ENSEMBLE_FILE_mem=${ENS_ROOT}/gdas.t${gHH}z.atmf006s.mem
else
if_gfs_nemsio='.false.'
ENSEMBLE_FILE_mem=${ENS_ROOT}/sfg_${gdate}_fhr06s_mem
fi
if [ ${if_4DEnVar} = Yes ]; then
BK_FILE_P1=${BK_ROOT}/wrfout_d01_${datep1}
BK_FILE_M1=${BK_ROOT}/wrfout_d01_${datem1}
if [ ${if_nemsio} = Yes ]; then
ENSEMBLE_FILE_mem_p1=${ENS_ROOT}/gdas.t${gHH}z.atmf009s.mem
ENSEMBLE_FILE_mem_m1=${ENS_ROOT}/gdas.t${gHH}z.atmf003s.mem
else
ENSEMBLE_FILE_mem_p1=${ENS_ROOT}/sfg_${gdate}_fhr09s_mem
ENSEMBLE_FILE_mem_m1=${ENS_ROOT}/sfg_${gdate}_fhr03s_mem
fi
fi
fi
# The following two only apply when if_observer = Yes, i.e. run observation operator for EnKF
# no_member number of ensemble members
# BK_FILE_mem path and base for ensemble members
no_member=20
BK_FILE_mem=${BK_ROOT}/wrfarw.mem
#
#
#####################################################
# Users should NOT make changes after this point
#####################################################
#
# Endianness of the BUFR/fix files used below.
BYTE_ORDER=Big_Endian
# BYTE_ORDER=Little_Endian
##################################################################################
# Check GSI needed environment variables are defined and exist
#
# Make sure ANAL_TIME is defined and in the correct format
if [ ! "${ANAL_TIME}" ]; then
echo "ERROR: \$ANAL_TIME is not defined!"
exit 1
fi
# Make sure WORK_ROOT is defined and exists
if [ ! "${WORK_ROOT}" ]; then
echo "ERROR: \$WORK_ROOT is not defined!"
exit 1
fi
# Make sure the background file exists
if [ ! -r "${BK_FILE}" ]; then
echo "ERROR: ${BK_FILE} does not exist!"
exit 1
fi
# Make sure OBS_ROOT is defined and exists
if [ ! "${OBS_ROOT}" ]; then
echo "ERROR: \$OBS_ROOT is not defined!"
exit 1
fi
if [ ! -d "${OBS_ROOT}" ]; then
echo "ERROR: OBS_ROOT directory '${OBS_ROOT}' does not exist!"
exit 1
fi
# Set the path to the GSI static files
if [ ! "${FIX_ROOT}" ]; then
echo "ERROR: \$FIX_ROOT is not defined!"
exit 1
fi
if [ ! -d "${FIX_ROOT}" ]; then
echo "ERROR: fix directory '${FIX_ROOT}' does not exist!"
exit 1
fi
# Set the path to the CRTM coefficients
if [ ! "${CRTM_ROOT}" ]; then
echo "ERROR: \$CRTM_ROOT is not defined!"
exit 1
fi
if [ ! -d "${CRTM_ROOT}" ]; then
echo "ERROR: fix directory '${CRTM_ROOT}' does not exist!"
exit 1
fi
# Make sure the GSI executable exists
if [ ! -x "${GSI_EXE}" ]; then
echo "ERROR: ${GSI_EXE} does not exist!"
exit 1
fi
# Check to make sure the number of processors for running GSI was specified
if [ -z "${GSIPROC}" ]; then
echo "ERROR: The variable $GSIPROC must be set to contain the number of processors to run GSI"
exit 1
fi
#
##################################################################################
# Create the ram work directory and cd into it
workdir=${WORK_ROOT}
echo " Create working directory:" ${workdir}
# NOTE(review): with WORK_ROOT=. this wipes the *current* directory's
# contents -- confirm this is intended before running in place.
if [ -d "${workdir}" ]; then
rm -rf ${workdir}/*
fi
mkdir -p ${workdir}
cd ${workdir}
#
##################################################################################
echo " Copy GSI executable, background file, and link observation bufr to working directory"
# Save a copy of the GSI executable in the workdir
# cp ${GSI_EXE} gsi.x
# Bring over background field (it's modified by GSI so we can't link to it)
cp ${BK_FILE} ./wrf_inout
if [ ${if_4DEnVar} = Yes ]; then
cp ${BK_FILE_P1} ./wrf_inou3
cp ${BK_FILE_M1} ./wrf_inou1
fi
# Link to the prepbufr data
ln -s ${PREPBUFR} ./prepbufr
# ln -s ${OBS_ROOT}/gdas1.t${HH}z.sptrmm.tm00.bufr_d tmirrbufr
# Link to the radiance data
# Parallel arrays: srcobsfile[i] is the source BUFR path, gsiobsfile[i]
# the link name GSI expects. The actual linking loop below is commented out.
srcobsfile[1]=${OBS_ROOT}/gdas1.t${HH}z.satwnd.tm00.bufr_d
gsiobsfile[1]=satwnd
srcobsfile[2]=${OBS_ROOT}/gdas1.t${HH}z.1bamua.tm00.bufr_d
gsiobsfile[2]=amsuabufr
srcobsfile[3]=${OBS_ROOT}/gdas1.t${HH}z.1bhrs4.tm00.bufr_d
gsiobsfile[3]=hirs4bufr
srcobsfile[4]=${OBS_ROOT}/gdas1.t${HH}z.1bmhs.tm00.bufr_d
gsiobsfile[4]=mhsbufr
srcobsfile[5]=${OBS_ROOT}/gdas1.t${HH}z.1bamub.tm00.bufr_d
gsiobsfile[5]=amsubbufr
srcobsfile[6]=${OBS_ROOT}/gdas1.t${HH}z.ssmisu.tm00.bufr_d
gsiobsfile[6]=ssmirrbufr
# NOTE(review): index 7 has its source commented out but its link name set.
# srcobsfile[7]=${OBS_ROOT}/gdas1.t${HH}z.airsev.tm00.bufr_d
gsiobsfile[7]=airsbufr
srcobsfile[8]=${OBS_ROOT}/gdas1.t${HH}z.sevcsr.tm00.bufr_d
gsiobsfile[8]=seviribufr
srcobsfile[9]=${OBS_ROOT}/gdas1.t${HH}z.iasidb.tm00.bufr_d
gsiobsfile[9]=iasibufr
srcobsfile[10]=${OBS_ROOT}/gdas1.t${HH}z.gpsro.tm00.bufr_d
gsiobsfile[10]=gpsrobufr
srcobsfile[11]=${OBS_ROOT}/gdas1.t${HH}z.amsr2.tm00.bufr_d
gsiobsfile[11]=amsrebufr
srcobsfile[12]=${OBS_ROOT}/gdas1.t${HH}z.atms.tm00.bufr_d
gsiobsfile[12]=atmsbufr
srcobsfile[13]=${OBS_ROOT}/gdas1.t${HH}z.geoimr.tm00.bufr_d
gsiobsfile[13]=gimgrbufr
srcobsfile[14]=${OBS_ROOT}/gdas1.t${HH}z.gome.tm00.bufr_d
gsiobsfile[14]=gomebufr
srcobsfile[15]=${OBS_ROOT}/gdas1.t${HH}z.omi.tm00.bufr_d
gsiobsfile[15]=omibufr
srcobsfile[16]=${OBS_ROOT}/gdas1.t${HH}z.osbuv8.tm00.bufr_d
gsiobsfile[16]=sbuvbufr
srcobsfile[17]=${OBS_ROOT}/gdas1.t${HH}z.eshrs3.tm00.bufr_d
gsiobsfile[17]=hirs3bufrears
srcobsfile[18]=${OBS_ROOT}/gdas1.t${HH}z.esamua.tm00.bufr_d
gsiobsfile[18]=amsuabufrears
srcobsfile[19]=${OBS_ROOT}/gdas1.t${HH}z.esmhs.tm00.bufr_d
gsiobsfile[19]=mhsbufrears
srcobsfile[20]=${OBS_ROOT}/rap.t${HH}z.nexrad.tm00.bufr_d
gsiobsfile[20]=l2rwbufr
srcobsfile[21]=${OBS_ROOT}/rap.t${HH}z.lgycld.tm00.bufr_d
gsiobsfile[21]=larcglb
srcobsfile[22]=${OBS_ROOT}/gdas1.t${HH}z.glm.tm00.bufr_d
gsiobsfile[22]=
# ii=1
# while [[ $ii -le 21 ]]; do
# if [ -r "${srcobsfile[$ii]}" ]; then
# # ln -s ${srcobsfile[$ii]} ${gsiobsfile[$ii]}
# echo "link source obs file ${srcobsfile[$ii]}"
# fi
# ((ii = $ii + 1))
# done
#
##################################################################################
# Decide whether GSI runs in hybrid (EnVar) mode: requires at least 5 usable
# ensemble members after subtracting 3 from the file-list count.
ifhyb=.false.
if [ ${if_hybrid} = Yes ]; then
  ls ${ENSEMBLE_FILE_mem}* >filelist02
  if [ ${if_4DEnVar} = Yes ]; then
    ls ${ENSEMBLE_FILE_mem_p1}* >filelist03
    ls ${ENSEMBLE_FILE_mem_m1}* >filelist01
  fi
  # Count lines with input redirection; the original "more file | wc -l"
  # spawned a useless pager process (output is identical: bare line count).
  nummem=$(wc -l <filelist02)
  nummem=$((nummem - 3))
  if [[ ${nummem} -ge 5 ]]; then
    ifhyb=.true.
    ${ECHO} " GSI hybrid uses ${ENSEMBLE_FILE_mem} with n_ens=${nummem}"
  fi
fi
# 4DEnVar is only meaningful when hybrid mode is active
if4d=.false.
if [[ ${ifhyb} = .true. && ${if_4DEnVar} = Yes ]]; then
  if4d=.true.
fi
#
##################################################################################
echo " Copy fixed files and link CRTM coefficient files to working directory"
# Fixed files used by GSI:
#   berror    = forecast model background error statistics
#   specoef   = CRTM spectral coefficients
#   trncoef   = CRTM transmittance coefficients
#   emiscoef  = CRTM coefficients for IR sea surface emissivity model
#   aerocoef  = CRTM coefficients for aerosol effects
#   cldcoef   = CRTM coefficients for cloud effects
#   satinfo   = info about assimilation of brightness temperatures
#   satangl   = angle dependent bias correction file (fixed in time)
#   pcpinfo   = info about assimilation of precipitation rates
#   ozinfo    = info about assimilation of ozone data
#   errtable  = obs errors for conventional data (regional only)
#   convinfo  = info about assimilation of conventional data
#   lightinfo = info about assimilation of GLM lightning data
#   bufrtable = ONLY needed for single obs test (oneobstest=.true.)
#   bftab_sst = bufr table for sst, ONLY for sst retrieval (retrieval=.true.)
# Pick background error covariance and anavinfo per covariance option and core
case ${bkcv_option} in
  GLOBAL)
    echo ' Use global background error covariance'
    BERROR=${FIX_ROOT}/${BYTE_ORDER}/nam_glb_berror.f77.gcv
    OBERROR=${FIX_ROOT}/prepobs_errtable.global
    case ${bk_core} in
      NMM)  ANAVINFO=${FIX_ROOT}/anavinfo_ndas_netcdf_glbe ;;
      ARW)  ANAVINFO=${FIX_ROOT}/anavinfo_arw_netcdf_glbe ;;
      NMMB) ANAVINFO=${FIX_ROOT}/anavinfo_nems_nmmb_glb ;;
    esac
    ;;
  *)
    echo ' Use NAM background error covariance'
    BERROR=${FIX_ROOT}/${BYTE_ORDER}/nam_nmmstat_na.gcv
    OBERROR=${FIX_ROOT}/nam_errtable.r3dv
    case ${bk_core} in
      NMM)  ANAVINFO=${FIX_ROOT}/anavinfo_ndas_netcdf ;;
      ARW)  ANAVINFO=${FIX_ROOT}/anavinfo_arw_netcdf ;;
      NMMB) ANAVINFO=${FIX_ROOT}/anavinfo_nems_nmmb ;;
    esac
    ;;
esac
SATANGL=${FIX_ROOT}/global_satangbias.txt
SATINFO=${FIX_ROOT}/global_satinfo.txt
CONVINFO=${FIX_ROOT}/global_convinfo.txt
OZINFO=${FIX_ROOT}/global_ozinfo.txt
PCPINFO=${FIX_ROOT}/global_pcpinfo.txt
LIGHTINFO=${FIX_ROOT}/global_lightinfo.txt
# Stage the fixed files in the working directory under GSI's expected names
cp $ANAVINFO anavinfo
cp $BERROR berror_stats
cp $SATANGL satbias_angle
cp $SATINFO satinfo
cp $CONVINFO convinfo
cp $OZINFO ozinfo
cp $PCPINFO pcpinfo
cp $LIGHTINFO lightinfo
cp $OBERROR errtable
#
# CRTM coefficient tables (emissivity / aerosol / cloud), all located under
# the byte-order-specific CRTM root and linked here under their own basenames.
CRTM_ROOT_ORDER=${CRTM_ROOT}/${BYTE_ORDER}
for coeff in \
    Nalli.IRwater.EmisCoeff.bin \
    NPOESS.IRice.EmisCoeff.bin \
    NPOESS.IRsnow.EmisCoeff.bin \
    NPOESS.IRland.EmisCoeff.bin \
    NPOESS.VISice.EmisCoeff.bin \
    NPOESS.VISland.EmisCoeff.bin \
    NPOESS.VISsnow.EmisCoeff.bin \
    NPOESS.VISwater.EmisCoeff.bin \
    FASTEM6.MWwater.EmisCoeff.bin \
    AerosolCoeff.bin \
    CloudCoeff.bin; do
  ln -s ${CRTM_ROOT_ORDER}/${coeff} ./${coeff}
done
# Link spectral/transmittance coefficient files for every sensor entry in the
# satinfo file (lines whose first field is not a "!" comment).
for file in $(awk '{if($1!~"!"){print $1}}' ./satinfo | sort | uniq); do
  ln -s ${CRTM_ROOT_ORDER}/${file}.SpcCoeff.bin ./
  ln -s ${CRTM_ROOT_ORDER}/${file}.TauCoeff.bin ./
done
# Only needed for the single-observation test
bufrtable=${FIX_ROOT}/prepobs_prep.bufrtable
cp $bufrtable ./prepobs_prep.bufrtable
# Satellite bias correction inputs; users may need their own satbias files
# for correct bias correction
cp ${GSI_ROOT}/fix/comgsi_satbias_in ./satbias_in
cp ${GSI_ROOT}/fix/comgsi_satbias_pc_in ./satbias_pc_in
#
##################################################################################
# Set some parameters for use by the GSI executable and to build the namelist
echo " Build the namelist "
# Background-error tuning (variance scale, horizontal scales); NAM defaults.
# as_op='1.0,1.0,0.5 ,0.7,0.7,0.5,1.0,1.0,'
vs_op='1.0,'
hzscl_op='0.373,0.746,1.50,'
if [ ${bkcv_option} = GLOBAL ]; then
  # as_op='0.6,0.6,0.75,0.75,0.75,0.75,1.0,1.0'
  vs_op='0.7,'
  hzscl_op='1.7,0.8,0.5,'
fi
if [ ${bk_core} = NMMB ]; then
  vs_op='0.6,'
fi
# Namelist logicals describing the background core; NMM/netcdf is the default
case ${bk_core} in
  ARW)
    bk_core_arw='.true.'
    bk_core_nmm='.false.'
    bk_core_nmmb='.false.'
    bk_if_netcdf='.true.'
    ;;
  NMMB)
    bk_core_arw='.false.'
    bk_core_nmm='.false.'
    bk_core_nmmb='.true.'
    bk_if_netcdf='.false.'
    ;;
  *)
    bk_core_arw='.false.'
    bk_core_nmm='.true.'
    bk_core_nmmb='.false.'
    bk_if_netcdf='.true.'
    ;;
esac
# Observer mode: skip minimization (miter=0) and save obs for the member runs
if [ ${if_observer} = Yes ]; then
  nummiter=0
  if_read_obs_save='.true.'
else
  nummiter=2
  if_read_obs_save='.false.'
fi
if_read_obs_skip='.false.'
# Build the GSI namelist on-the-fly
. $GSI_NAMELIST
# Adjust the anavinfo vertical level counts to match the background file for
# WRF ARW and NMM: read bottom_top(_stag) from wrf_inout and substitute them
# for the level counts currently in anavinfo.
if [ ${bk_core} = ARW ] || [ ${bk_core} = NMM ]; then
  bklevels=$(ncdump -h wrf_inout | grep "bottom_top =" | awk '{print $3}')
  bklevels_stag=$(ncdump -h wrf_inout | grep "bottom_top_stag =" | awk '{print $3}')
  # grep the file directly (the original piped through a useless cat)
  anavlevels=$(grep ' sf ' anavinfo | tail -1 | awk '{print $2}') # levels of sf, vp, u, v, t, etc
  anavlevels_stag=$(grep ' prse ' anavinfo | tail -1 | awk '{print $2}') # levels of prse
  sed -i 's/ '$anavlevels'/ '$bklevels'/g' anavinfo
  sed -i 's/ '$anavlevels_stag'/ '$bklevels_stag'/g' anavinfo
fi
#
###################################################
# run GSI
###################################################
echo " Run GSI with ${bk_core} background"
# case $ARCH in
# 'IBM_LSF')
# ${RUN_COMMAND} ./gsi.x <gsiparm.anl >stdout 2>&1
# ;;
# *)
# ${RUN_COMMAND} ./gsi.x >stdout 2>&1
# ;;
# esac
$RUN_COMMAND >stdout 2>&1
# Capture the exit status IMMEDIATELY after the command: in the original only
# comments separated the two, which happened to work but breaks as soon as any
# command is inserted in between.
error=$?
##################################################################
# run time error check
##################################################################
if [ ${error} -ne 0 ]; then
  # ${GSI_EXE} instead of the original ${GSI}, which is defined nowhere in
  # this script and expanded to an empty string in the message.
  echo "ERROR: ${GSI_EXE} crashed  Exit status=${error}"
  exit ${error}
fi
#
##################################################################
#
# GSI updating satbias_in
#
# GSI updating satbias_in (only for cycling assimilation)
# Copy the output to more understandable names
# fort.201..204 are the conventional-data fit files (p, wind, T, q);
# fort.207 is the radiance fit summary.
ln -s stdout stdout.anl.${ANAL_TIME}
ln -s wrf_inout wrfanl.${ANAL_TIME}
ln -s fort.201 fit_p1.${ANAL_TIME}
ln -s fort.202 fit_w1.${ANAL_TIME}
ln -s fort.203 fit_t1.${ANAL_TIME}
ln -s fort.204 fit_q1.${ANAL_TIME}
ln -s fort.207 fit_rad1.${ANAL_TIME}
# Loop over first and last outer loops to generate innovation
# diagnostic files for indicated observation types (groups)
#
# NOTE: Since we set miter=2 in GSI namelist SETUP, outer
# loop 03 will contain innovations with respect to
# the analysis. Creation of o-a innovation files
# is triggered by write_diag(3)=.true. The setting
# write_diag(1)=.true. turns on creation of o-g
# innovation files.
#
loops="01 03"
for loop in $loops; do
case $loop in
01) string=ges ;;
03) string=anl ;;
*) string=$loop ;;
esac
# Collect diagnostic files for obs types (groups) below
# listall="conv amsua_metop-a mhs_metop-a hirs4_metop-a hirs2_n14 msu_n14 \
# sndr_g08 sndr_g10 sndr_g12 sndr_g08_prep sndr_g10_prep sndr_g12_prep \
# sndrd1_g08 sndrd2_g08 sndrd3_g08 sndrd4_g08 sndrd1_g10 sndrd2_g10 \
# sndrd3_g10 sndrd4_g10 sndrd1_g12 sndrd2_g12 sndrd3_g12 sndrd4_g12 \
# hirs3_n15 hirs3_n16 hirs3_n17 amsua_n15 amsua_n16 amsua_n17 \
# amsub_n15 amsub_n16 amsub_n17 hsb_aqua airs_aqua amsua_aqua \
# goes_img_g08 goes_img_g10 goes_img_g11 goes_img_g12 \
# pcp_ssmi_dmsp pcp_tmi_trmm sbuv2_n16 sbuv2_n17 sbuv2_n18 \
# omi_aura ssmi_f13 ssmi_f14 ssmi_f15 hirs4_n18 amsua_n18 mhs_n18 \
# amsre_low_aqua amsre_mid_aqua amsre_hig_aqua ssmis_las_f16 \
# ssmis_uas_f16 ssmis_img_f16 ssmis_env_f16 mhs_metop_b \
# hirs4_metop_b hirs4_n19 amusa_n19 mhs_n19 goes_glm_16"
# Derive the obs-type list from the per-processor diag file names by stripping
# the trailing loop suffix.
# NOTE(review): substr($0, 0, ...) with a 0 start index is implementation-
# dependent across awk variants — confirm it yields the intended type names.
listall=$(ls pe* | cut -f2 -d"." | awk '{print substr($0, 0, length($0)-3)}' | sort | uniq)
# Concatenate the per-processor pieces of each obs type into one diag file
for type in $listall; do
count=$(ls pe*${type}_${loop}* | wc -l)
if [[ $count -gt 0 ]]; then
cat pe*${type}_${loop}* >diag_${type}_${string}.${ANAL_TIME}
fi
done
done
# Clean working directory to save only important files
ls -l * >list_run_directory
if [[ ${if_clean} = clean && ${if_observer} != Yes ]]; then
echo ' Clean working directory after GSI run'
rm -f *Coeff.bin # all CRTM coefficient files
rm -f pe0* # diag files on each processor
rm -f obs_input.* # observation middle files
rm -f siganl sigf0? # background middle files
rm -f fsize_* # delete temperal file for bufr size
fi
#
#
#################################################
# start to calculate diag files for each member
#################################################
#
if [ ${if_observer} = Yes ]; then
  string=ges
  # Rename the ensemble-mean diag files produced by the deterministic run
  for type in $listall; do
    if [[ -f diag_${type}_${string}.${ANAL_TIME} ]]; then
      mv diag_${type}_${string}.${ANAL_TIME} diag_${type}_${string}.ensmean
    fi
  done
  mv wrf_inout wrf_inout_ensmean
  # Rebuild the GSI namelist for the member runs: observer mode, no
  # minimization, re-read the obs saved by the ensemble-mean run
  nummiter=0
  if_read_obs_save='.false.'
  if_read_obs_skip='.true.'
  . $GSI_NAMELIST
  # Loop through each member
  loop="01"
  ensmem=1
  while [[ $ensmem -le $no_member ]]; do
    # -f: do not error when no pe* files remain from the previous pass
    rm -f pe0*
    # BUG FIX: the original used 'print', a ksh builtin that does not exist
    # in bash and failed on every iteration
    echo "\$ensmem is $ensmem"
    ensmemid=$(printf %3.3i $ensmem)
    # get new background for each member
    if [[ -f wrf_inout ]]; then
      rm wrf_inout
    fi
    BK_FILE=${BK_FILE_mem}${ensmemid}
    echo $BK_FILE
    ln -s $BK_FILE wrf_inout
    # run GSI
    echo " Run GSI with ${bk_core} for member ${ensmemid}"
    # case $ARCH in
    # 'IBM_LSF')
    # ${RUN_COMMAND} ./gsi.x <gsiparm.anl >stdout_mem${ensmemid} 2>&1
    # ;;
    # *)
    # ${RUN_COMMAND} ./gsi.x >stdout_mem${ensmemid} 2>&1
    # ;;
    # esac
    $RUN_COMMAND >stdout_mem${ensmemid} 2>&1
    # capture exit status immediately; save run time file status afterwards
    error=$?
    if [ ${error} -ne 0 ]; then
      # ${GSI_EXE} instead of the original undefined ${GSI}
      echo "ERROR: ${GSI_EXE} crashed for member ${ensmemid}  Exit status=${error}"
      exit ${error}
    fi
    ls -l * >list_run_directory_mem${ensmemid}
    # concatenate per-processor pieces into one diag file per obs type
    for type in $listall; do
      count=$(ls pe*${type}_${loop}* | wc -l)
      if [[ $count -gt 0 ]]; then
        cat pe*${type}_${loop}* >diag_${type}_${string}.mem${ensmemid}
      fi
    done
    # next member
    ((ensmem += 1))
  done
fi
exit 0
...@@ -14,16 +14,17 @@ if [ $# -gt 0 ]; then ...@@ -14,16 +14,17 @@ if [ $# -gt 0 ]; then
echo "[WRF] no automatic copying" echo "[WRF] no automatic copying"
fi fi
# run cmd line options # run cmd line options
echo "Executing" echo "[WRF] Executing"
exec "$@" exec "$@"
echo "Finished inside the container." echo "[WRF] Finished inside the container."
else else
# interactive # interactive
echo "Welcome inside an IMGW container!" echo "[WRF] Welcome inside an IMGW container!"
echo "Get help: singularity run-help $SINGULARITY_NAME" echo "[WRF] Get help: singularity run-help $SINGULARITY_NAME"
echo "WRF ($WRF_VERSION, $WRF_BUILD_TARGET)" echo "[WRF] $WRF_VERSION, $WRF_BUILD_TARGET"
echo "WRF $WRF_BUILD_TARGET -> /wrf/run" echo "[WRF] $WRF_BUILD_TARGET -> /wrf/run"
echo "WRF exe -> /wrf/bin" echo "[WRF] exe -> /wrf/bin"
cd /wrf/run echo "[WRF] Automatic copy of /wrf/run to current directory: ./$(basename $SINGULARITY_CONTAINER) init"
echo "[WRF] e.g. run real.exe from outside: ./$(basename $SINGULARITY_CONTAINER) real.exe"
exec /.singularity.d/actions/shell "$@" exec /.singularity.d/actions/shell "$@"
fi fi
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment