Commit d12499ec authored by Michael Blaschek

wrf update

parent 53eaae40
Bootstrap: library
From: mblaschek/imgw/ubuntu:18.04
%labels
APPLICATION_NAME miniconda3
APPLICATION_VERSION py39-4.9.2-Linux-x86_64
APPLICATION_URL https://docs.conda.io
AUTHOR_NAME Michael Blaschek
AUTHOR_EMAIL michael.blaschek@univie.ac.at
LAST_UPDATED 20211118
%setup
%environment
# Set the conda distribution type, its version number, the python
# version it utilizes, the root and installation directories where
# the distribution will be installed within the container, and the
# root URL to the installer
export CONDA_DISTRIBUTION='miniconda'
export CONDA_VERSION='3'
export CONDA_PYTHON_VERSION='py39'
export CONDA_INSTALLER_VERSION='4.9.2'
export CONDA_ARCH='Linux-x86_64'
export CONDA_INSTALL_DIR="/opt/${CONDA_DISTRIBUTION}${CONDA_VERSION}"
# Set PATH to conda distribution
export PATH="${CONDA_INSTALL_DIR}/bin:${PATH}"
%post -c /bin/bash
# Set operating system mirror URL
export MIRRORURL='http://at.archive.ubuntu.com/ubuntu'
# Set operating system version
export OSVERSION='bionic'
# Set system locale
export LC_ALL='C'
# Set debian frontend interface
export DEBIAN_FRONTEND='noninteractive'
# Upgrade all software packages to their latest versions
apt-get -y update && apt-get -y upgrade
cd /tmp
# Set the conda distribution type, its version number, the python
# version it utilizes, the root and installation directories where
# the distribution will be installed within the container, and the
# root URL to the installer
export CONDA_DISTRIBUTION='miniconda'
export CONDA_VERSION='3'
export CONDA_PYTHON_VERSION='py39'
export CONDA_INSTALLER_VERSION='4.9.2'
export CONDA_ARCH='Linux-x86_64'
export CONDA_INSTALLER="${CONDA_DISTRIBUTION^}${CONDA_VERSION}-${CONDA_PYTHON_VERSION}_${CONDA_INSTALLER_VERSION}-${CONDA_ARCH}.sh"
export CONDA_INSTALL_DIR="/opt/${CONDA_DISTRIBUTION}${CONDA_VERSION}"
export CONDA_ROOT_URL='https://repo.anaconda.com'
# Download and install conda distribution
wget "${CONDA_ROOT_URL}/${CONDA_DISTRIBUTION}/${CONDA_INSTALLER}"
chmod +x "${CONDA_INSTALLER}"
"./${CONDA_INSTALLER}" -b -p "${CONDA_INSTALL_DIR}"
# Remove conda installer
rm "${CONDA_INSTALLER}"
# Add MPI Package from conda-forge
# ucx
# openmpi
$CONDA_INSTALL_DIR/bin/conda install -y -c conda-forge ucx openmpi mpi4py
# Cleanup
apt-get -y autoremove --purge
apt-get -y clean
# Update database for mlocate
updatedb
%files
%runscript
%test
Bootstrap: docker
From: mambaorg/micromamba:latest
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
$PWD/definition-files/runscript /.singularity.d/runscript
$PWD/definition-files/run-help /.singularity.d/runscript.help
%apprun mpitest
nproc=4
if [ $# -eq 1 ]; then
nproc=$1
fi
echo "Running MPITest : $nproc"
exec mpirun -np $nproc mpitest.x
%appfiles mpitest
./definition-files/MPI/mpitest.c ./mpitest.c
%appinstall mpitest
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
mpicc mpitest.c -o mpitest.x
%environment
export LANG=C.UTF-8
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
%post
micromamba -q install -y -n base -c conda-forge \
python=3.9 \
ucx \
openmpi \
mpi4py \
&& micromamba clean --all --yes
# command prompt name
CNAME=m.ompi
# Note: setting PS1 via $SINGULARITY_ENVIRONMENT does not work (it ends up in /.singularity.d/env/91-environment.sh), so write a custom env file instead
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# add some labels
echo "libc $(ldd --version | head -n1 | cut -d' ' -f4)" >> "$SINGULARITY_LABELS"
echo "linux $(cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2)" >> "$SINGULARITY_LABELS"
%test
mpicc --version
 Bootstrap: docker
-From: texlive:latest
+From: texlive/texlive:latest
 %labels
 maintainer IT-IMGW <it.img-wien@univie.ac.at>
@@ -11,6 +11,7 @@ apt install -y \
 python3 \
 gcc-6 \
 gfortran-6 \
+libopenmpi-dev \
 libmpich-dev \
 libhdf5-dev \
 libnetcdf-dev \
# WRF in a container
:construction:
This is still experimental. No guarantees.
Please find the following containers available for download:
- WRF
- GSI
- WRFpy
- WRF.dev
## Design
Build a container with all requirements installed and make it easily available to users for further development. We use a staged build:
1. WRF sandbox / requirements
2. Build container
3. Final container
Build some special containers that are requested most often, e.g. em_real and em_les (see the sketch below).
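The most requested targets are wrapped by the repository Makefile (the Hurricane Sandy script below calls `make em_real`). A minimal sketch of the equivalent plain Singularity commands; the definition-file names here are illustrative, only `sandbox.wrf.dev` and the resulting `WRF-4.4.1-em_real.sif` are taken from the files in this commit:

```sh
# stage 1: development sandbox with compilers, OpenMPI, NetCDF/HDF5 (AlmaLinux 8 base)
sudo singularity build --sandbox sandbox.wrf.dev wrf.dev.def          # definition file name illustrative
# stage 2/3: compile WRF on top of the sandbox and copy /wrf/bin and /wrf/run into the final image
sudo singularity build WRF-4.4.1-em_real.sif wrf.em_real.def          # definition file name illustrative
```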
## Example: Hurricane Sandy
## Performance tests
We ran the same container on:
- VSC
- JET
- SRVX1
Multi-node runs are still an open question (see the launch sketch below).
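The run scripts below use a hybrid launch: the host MPI starts the ranks and each rank executes inside the container. A multi-node run would build on the same pattern; a sketch, assuming a compatible OpenMPI installation on the host (the hostfile is illustrative):

```sh
# single node, 8 ranks
mpirun -np 8 singularity exec WRF-4.4.1-em_real.sif wrf.exe
# multi-node: let the host MPI spread ranks over nodes, e.g. via a hostfile
mpirun -np 16 --hostfile ./hosts singularity exec WRF-4.4.1-em_real.sif wrf.exe
```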
# Development
If you would like to build your own WRF container with customized source code or a different setting/purpose, here is some guidance on how to do this (see the sketch after the steps).
Steps:
1. Pull the WRF development container from ...
2. Pull the source code of WRF / WPS ...
3. Configure the source code to use the libraries inside the container
4. Test your settings / compilation
5. Build your own container from the WRF development container and your source code.
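A minimal sketch of these steps, assuming the development container is available locally as `wrf.dev.sif` (the download location in step 1 is not fixed here, and the container name is illustrative):

```sh
# 2. get the WRF source code
git clone https://github.com/wrf-model/WRF.git && cd WRF
# 3./4. configure and compile against the libraries inside the container
#       (NETCDF, HDF5_ROOT, JASPERINC/JASPERLIB are already exported in the container environment)
singularity exec ../wrf.dev.sif ./configure
singularity exec ../wrf.dev.sif ./compile em_real
# 5. build your own final container on top of the development image
#    (use "Bootstrap: localimage" / "From: wrf.dev.sif" in your definition file)
```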
Bootstrap: localimage
From: ../../containers/alma8.base.sif
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
baseimage AlmaLinux8
%post
# Every line will be a layer in the container
# See https://fedoraproject.org/wiki/EPEL#Quickstart for powertools
# yum --enablerepo epel groupinstall -y "Development Tools" \
yum update -y \
&& yum install -y dnf-plugins-core \
&& dnf config-manager --set-enabled powertools \
&& yum install -y epel-release \
&& yum update -y \
&& yum --enablerepo epel install -y \
curl wget \
file \
findutils \
gcc-c++ \
gcc \
gcc-gfortran \
glibc.i686 libgcc.i686 \
libpng-devel jasper-libs jasper-devel \
m4 make perl cmake \
flex flex-devel bison bison-devel \
libcurl-devel \
libxml2 libxml2-devel perl-XML-LibXML ImageMagick \
python3 python3-pip python3-devel \
tar bash tcsh time which zlib zlib-devel \
git scl \
gnupg2 \
hostname \
iproute \
patch \
openmpi-devel \
openmpi \
hdf5-openmpi-devel \
hdf5-openmpi-static \
netcdf-openmpi-devel \
netcdf-openmpi-static \
netcdf-fortran-openmpi-devel \
netcdf-fortran-openmpi-static \
openblas-devel.x86_64 \
openblas-openmp.x86_64 \
&& rm -rf /var/cache/yum \
&& yum clean all \
&& dnf clean all \
&& rm -rf /usr/share/doc \
&& rm -rf /usr/share/man \
&& ln -s /usr/include/openmpi-x86_64/ /usr/lib64/openmpi/include
# command prompt name
CNAME=wrf.dev
# Note: setting PS1 via $SINGULARITY_ENVIRONMENT does not work (it ends up in /.singularity.d/env/91-environment.sh), so write a custom env file instead
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# not sure why this is not set by default
echo "export PKG_CONFIG_PATH=/usr/lib64/openmpi/lib/pkgconfig/" >> $SINGULARITY_ENVIRONMENT
%environment
export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export PATH=/usr/lib64/openmpi/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export LIBRARY=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules/openmpi:/usr/include
export NETCDF=/usr/lib64/openmpi
export NETCDF_ROOT=/usr/lib64/openmpi
export HDF5_ROOT=/usr/lib64/openmpi
export JASPERINC=/usr/include/jasper/
export JASPERLIB=/usr/lib64/
Bootstrap: localimage
From: sandbox.wrf.dev
Stage: build
# Two Stage Build of GSI
# use the sandbox development image
# 1. Compile the target
# 2. Copy relevant exe to target GSI container
# Make sure the ldd / linking and inputs are ok
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
./scripts/runscript /.singularity.d/runscript
./scripts/run_gsi.ksh /wrf/bin
%apprun init
echo "Please modify run_gsi.ksh to your needs and settings or use set_env.ksh"
cp -uv /wrf/bin/run_gsi.ksh .
%apphelp init
Use this app to copy the run_gsi.ksh to your current directory.
This is a modified version of the dtcenter container script.
Please update either the run_gsi.ksh or the set_env.ksh in your local directory.
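Example (assuming the container image is named GSI-3.7.sif, as in the run scripts):
    singularity run --app init ./GSI-3.7.sif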
%post
# GSI requires cmake and openblas + lapack
yum -y install cmake openblas-devel.x86_64 openblas-openmp.x86_64
# Build settings (same environment as for the WRF sandbox build)
WRF_BUILD_TARGET=em_real
LDFLAGS="-lm"
NETCDF=/usr/lib64/openmpi/
HDF5_ROOT=$NETCDF
export GSI_VERSION=3.7
export ENKF_VERSION=1.3
mkdir -p /wrf/gsi \
&& curl -SL https://dtcenter.org/sites/default/files/comGSIv${GSI_VERSION}_EnKFv${ENKF_VERSION}.tar.gz | tar -xzC /wrf/gsi
#
#
#
ln -s /usr/lib64/gfortran/modules/openmpi/* /usr/lib64/openmpi/include/
#
# prep GSI build
#
mkdir -p /wrf/gsi/build \
&& cd /wrf/gsi/build \
&& cmake /wrf/gsi/comGSIv${GSI_VERSION}_EnKFv${ENKF_VERSION}
#
# Fix a few GSI bugs
#
umask 0002 \
&& sed -i 's/wij(1)/wij/g' /wrf/gsi/comGSIv3.7_EnKFv1.3/src/setuplight.f90 \
&& sed -i "s,\$, -L$NETCDF,g" /wrf/gsi/build/src/CMakeFiles/gsi.x.dir/link.txt \
&& sed -i "s,\$, -L$NETCDF,g" /wrf/gsi/build/src/enkf/CMakeFiles/enkf_wrf.x.dir/link.txt
#
# Build GSI
#
cd /wrf/gsi/build \
&& make -j 4
#
# Fix all hard-coded paths
#
# sed -i 's,/comsoftware/gsi/,/wrf/gsi/' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/gsi_build,/build' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/data,./data,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/scripts/case,.,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/wrfprd,.,g' /wrf/bin/run_gsi.ksh \
# && sed -i 's,/home/gsiprd,.,g' /wrf/bin/run_gsi.ksh
#
# Wrap it up
#
echo "export GSI_VERSION=$GSI_VERSION" >> $SINGULARITY_ENVIRONMENT
echo "export ENKF_VERSION=$ENKF_VERSION" >> $SINGULARITY_ENVIRONMENT
CNAME=alma8.gsi
# Note: setting PS1 via $SINGULARITY_ENVIRONMENT does not work (it ends up in /.singularity.d/env/91-environment.sh), so write a custom env file instead
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
%environment
export PATH=/wrf/gsi/build/bin:/wrf/bin:$PATH
@@ -72,7 +72,7 @@ echo "export PKG_CONFIG_PATH=/usr/lib64/openmpi/lib/pkgconfig/" >> $SINGULARITY_ENVIRONMENT
 export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
 export PATH=/usr/lib64/openmpi/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
 export LIBRARY=/usr/lib64/openmpi/lib:/usr/lib64:/lib64:/lib
-export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules:/usr/include
+export INCLUDE=/usr/include/openmpi-x86_64/:/usr/lib64/gfortran/modules/openmpi:/usr/include
 export NETCDF=/usr/lib64/openmpi
 export NETCDF_ROOT=/usr/lib64/openmpi
 export JASPERINC=/usr/include/jasper/
@@ -48,8 +48,14 @@ Stage: final
 # Copy only executables from WRF to directory
 /wrf/bin /wrf/bin
 /wrf/run /wrf/run
+# WPS
 /wrf/WPS/link_grib.csh /wrf/bin
 /wrf/WPS/ungrib/Variable_Tables /wrf/run
+/wrf/WPS/geogrid/GEOGRID* /wrf/run/geogrid/
+/wrf/WPS/geogrid/gribmap.txt /wrf/run/geogrid/
+/wrf/WPS/metgrid/METGRID* /wrf/run/metgrid/
+/wrf/WPS/metgrid/gribmap.txt /wrf/run/metgrid/
+# Log files
 /wrf/WRF/compile_wrf_arw_opt34.1.log /wrf
 /wrf/WPS/compile_wps.log /wrf
Bootstrap: docker
From: mambaorg/micromamba:latest
%labels
maintainer IT-IMGW <it.img-wien@univie.ac.at>
%files
../../definition-files/runscript /.singularity.d/runscript
../../definition-files/run-help /.singularity.d/runscript.help
%post
# install curl
apt -y update && apt -y install curl && apt -y clean
# install python packages
micromamba -q install -y -n base -c conda-forge \
pygrib=2.1.4 \
cartopy=0.21 \
netCDF4=1.5.8 \
pyyaml=6.0
# include WRF/WPS scripts
mkdir -p /opt/wrf && cd /opt/wrf
curl -sL https://github.com/NCAR/container-dtc-nwp/archive/refs/tags/v4.1.0.tar.gz | tar --strip-components=5 -zxC . container-dtc-nwp-4.1.0/components/scripts/common/python/
# fix hard-coded paths
sed -i 's,/home/pythonprd,./,g' *.py
sed -i 's,/home/postprd/,./,g' *.py
sed -i 's,/home/scripts/case/,./,g' *.py
sed -i 's,cartopy.config,#cartopy.config,g' *.py
# final
CNAME=wrfpy
# Note: setting PS1 via $SINGULARITY_ENVIRONMENT does not work (it ends up in /.singularity.d/env/91-environment.sh), so write a custom env file instead
echo "export PS1=\"[IMGW-$CNAME]\w\$ \"" >> /.singularity.d/env/99-zz-custom-env.sh
# add some labels
echo "libc $(ldd --version | head -n1 | cut -d' ' -f4)" >> "$SINGULARITY_LABELS"
echo "linux $(cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2)" >> "$SINGULARITY_LABELS"
%environment
export MPLCONFIGDIR=.
export LANG=C.UTF-8
export PATH=/opt/conda/bin:$PATH
export LIBRARY=/opt/conda/lib
export INCLUDE=/opt/conda/include
export PYTHONDONTWRITEBYTECODE=1
export PYTHONPATH=/opt/wrf:$PYTHONPATH
models/WRF/dtc_nwp_flow_3.png (binary image, 93.2 KiB)
#!/bin/bash
# By MB
# Based on the Example from DTCenter, Boulder NCAR.
# https://dtcenter.org/nwp-containers-online-tutorial
# https://dtcenter.org/nwp-containers-online-tutorial/hurricane-sandy-case-27-oct-2012
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
pause() {
if ${DEBUG:-false}; then
read -n1 -rsp $'Press any key to continue or Ctrl+C to exit...\n'
fi
}
warning() {
printf "%-68s[$YELLOW%10s$NC]\n" "$@" "SKIPPED"
return 0
}
report() {
if [ $? -eq 0 ]; then
printf "%-68s[$GREEN%10s$NC]\n" "$@" "OK"
return 0
else
printf "%-68s[$RED%10s$NC]\n" "$@" "FAILED"
return 1
fi
}
question() {
read -p "$@ (y/n)" yn
case $yn in
[Yy]*) return 0 ;;
*)
warning "skipping"
return 1
;;
esac
}
INAME="SANDY"
CPATH=$(dirname $0)
case_name=sandy
WRFCONTAINER=$CPATH/WRF-4.4.1-em_real.sif
PYCONTAINER=$CPATH/WRFpy.sif
GISCONTAINER=$CPATH/GSI-3.7.sif
NPROC=1
if [ $NPROC -gt 1 ]; then
warning "[$INAME] Trying to run with n=$NPROC processes."
echo "[$INAME] Make sure mpirun can be accessed!"
command -v mpirun
report "[$INAME] mpirun command works" || exit 1
# Check if Versions are similar ?
# MPI Version outside needs to be newer!
fi
# Check if we have the container?
# test -f Makefile
# report "[$INAME] Makefile found"
# if [ $? -eq 0 ]; then
# echo "[$INAME] Building WRF ..."
# make em_real
# fi
# Pull the container from the IMGW library if it is not available locally
test -f $WRFCONTAINER
report "[$INAME] WRF em_real [$WRFCONTAINER]"
if [ $? -ne 0 ]; then
# Try to pull
echo "Manual: https://cloud.sylabs.io/library/mblaschek/models/wrf"
singularity pull $WRFCONTAINER library://mblaschek/models/wrf-emreal:4.4.1
report "[$INAME] WRF em_real pulled from library" || exit 1
fi
test -f $PYCONTAINER
report "[$INAME] WRF python [$PYCONTAINER]" || exit 1
test -f $GISCONTAINER
report "[$INAME] GIS [$GISCONTAINER]" || exit 1
PROJ_PATH=
read -p "[$INAME] Where do you want to place all files? [$PWD] :" PROJ_PATH
if [ -z "$PROJ_PATH" ]; then
PROJ_PATH=$PWD
fi
INPUT_DIR=$PROJ_PATH/sandy/data
mkdir -vp $PROJ_PATH/sandy/data/
report "[$INAME] using $PROJ_PATH"
cd $PROJ_PATH/sandy/data/
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-derechodata_20120629.tar.gz | tar zxC .
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-snowdata_20160123.tar.gz | tar zxC .
test -d model_data
report "[$INAME] Sandy input data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-sandydata_20121027.tar.gz | tar zxC .
report "[$INAME] using Sandy input data" || exit 1
fi
test -d obs_data
report "[$INAME] Sandy obs data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/CRTM_v2.3.0.tar.gz | tar zxC .
report "[$INAME] using Sandy obs data" || exit 1
fi
test -d WPS_GEOG
report "[$INAME] Sandy geogrid data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/wps_geog.tar.gz | tar zxC .
report "[$INAME] using Sandy geogrid data" || exit 1
fi
test -d shapefiles
report "[$INAME] Sandy natural earth data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/shapefiles.tar.gz | tar zxC .
report "[$INAME] using Sandy natural earth data" || exit 1
fi
# return back
cd $PROJ_PATH/sandy
#
# dtcenter
# https://github.com/NCAR/container-dtc-nwp
#
#
# Setup Case files for SANDY
#
# Get sandy case files
curl -sL https://github.com/NCAR/container-dtc-nwp/archive/refs/tags/v4.1.0.tar.gz | tar --strip-components=4 -zxC . container-dtc-nwp-4.1.0/components/scripts/sandy_20121027
# fix paths
sed -i 's,/data/,./data/,g' set_env.ksh
sed -i 's,/data/,./data/,g' namelist.wps
sed -i 's,/comsoftware/wrf/WPS-4.3,.,g' namelist.wps
# Optional run plot
# needs cartopy and python
# /home/scripts/common/run_python_domain.ksh
#
# Get default em_real configuration files from /wrf/run
# WRF/run and WPS/geogrid, WPS/metgrid, WPS/ungrib
#
echo "[$INAME] CP em_real files to current directory [no overwriting]"
./$WRFCONTAINER init
report "[$INAME] WRF init complete" || exit 1
# Check if Variable Tables are present / depends on input files
source set_env.ksh
# input_data is defined in set_env.ksh
test -f Vtable.${input_data}
report "[$INAME] Variable Table Vtable.${input_data} found" || { echo "Please provide a Variable Table!"; exit 1; }
# Link GRIB files into the current directory (link_grib.csh is provided in /wrf/bin inside the container)
singularity exec $WRFCONTAINER link_grib.csh $INPUT_DIR/model_data/${case_name}/*
report "[$INAME] Model input data from: $INPUT_DIR/model_data/${case_name}/"
#
# geogrid data
#
# Remove old files
if [ -e geo_em.d*.nc ]; then
rm -rf geo_em.d*.nc
fi
# Command for geogrid
echo "[$INAME] Running geogrib ..."
singularity exec $WRFCONTAINER geogrid.exe >run_geogrid.log 2>&1
test -f geo_em.d01.nc
report "[$INAME] WPS geogrid ready." || exit 1
#
# ungrib data
#
file_date=$(cat namelist.wps | grep -i start_date | cut -d"'" -f2 | cut -d":" -f1)
# remove old files
if [ -e PFILE:${file_date} ]; then
rm -rf PFILE*
fi
if [ -e FILE:${file_date} ]; then
rm -rf FILE*
fi
# Command for ungrib
echo "[$INAME] Running ungrib ..."
singularity exec $WRFCONTAINER ungrib.exe >run_ungrib.log 2>&1
ls -ls FILE:*
report "[$INAME] WPS ungrib ready." || exit 1
#
# metgrid data
#
# Remove old files
if [ -e met_em.d*.${file_date}:00:00.nc ]; then
rm -rf met_em.d*
fi
# Command for metgrid
echo "[$INAME] Running metgrid ..."
singularity exec $WRFCONTAINER metgrid.exe >run_metgrid.log 2>&1
ls -ls met_em.d01.*
report "[$INAME] WPS metgrid ready." || exit 1
echo "[$INAME] WPS setup complete. Ready for WRF"
#
# WRF
#
# Strip the nocolons option from namelist.input (keep the original as namelist.nocolons)
sed -e '/nocolons/d' namelist.input >nml
cp namelist.input namelist.nocolons
mv nml namelist.input
# Remove old files
if [ -e wrfinput_d* ]; then
rm -rf wrfi* wrfb*
fi
# Command for real
echo "[$INAME] Running real ..."
# real.exe can be run with MPI or serially
if [ $NPROC -gt 1 ]; then
# MPI run (requires a compatible OpenMPI on the host)
module load openmpi
mpirun -np $NPROC singularity exec $WRFCONTAINER real.exe >run_real.log 2>&1
else
# Serial run
singularity exec $WRFCONTAINER real.exe >run_real.log 2>&1
fi
test -f wrfinput_d01 && test -f wrfbdy_d01
report "[$INAME] WRF run finished"
#
# GSI Data Assimilation
#
# disabled by default; change 'false' to 'true' to enable the GSI step
if false; then
echo "[$INAME] Running gsi ..."
singularity run --app init $GISCONTAINER
test -f run_gsi.ksh
report "[$INAME] GSI run script ready" || exit 1
# add the correct run Command
if [ $NPROC -gt 1 ]; then
echo "export RUN_COMMAND=\"mpirun -np $NPROC singularity exec $GISCONTAINER \$GIS_EXE\"" >>set_env.ksh
else
echo "export RUN_COMMAND=\"singularity exec $GISCONTAINER \$GIS_EXE\"" >>set_env.ksh
fi
# Execute GSI
./run_gsi.ksh
fi
#
# run WRF
#
# If wrfinput_d01.orig exists, rename it to wrfinput_d01 to reset the state
if [[ -e wrfinput_d01.orig ]]; then
mv wrfinput_d01.orig wrfinput_d01
fi
# If GSI was run, update the wrfinput file
if [[ -e ./wrf_inout ]]; then
mv wrfinput_d01 wrfinput_d01.orig
cp ./wrf_inout wrfinput_d01
fi
if [ $NPROC -gt 1 ]; then
mpirun -np $NPROC singularity exec $WRFCONTAINER wrf.exe >run_wrf.log 2>&1
else
singularity exec $WRFCONTAINER wrf.exe >run_wrf.log 2>&1
fi
#!/bin/bash
# By MB
# Based on the Example from DTCenter, Boulder NCAR.
# https://dtcenter.org/nwp-containers-online-tutorial
# https://dtcenter.org/nwp-containers-online-tutorial/hurricane-sandy-case-27-oct-2012
CPATH=$(dirname $0)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
pause() {
if ${DEBUG:-false}; then
read -n1 -rsp $'Press any key to continue or Ctrl+C to exit...\n'
fi
}
warning() {
printf "%-68s[$YELLOW%10s$NC]\n" "$@" "SKIPPED"
return 0
}
report() {
if [ $? -eq 0 ]; then
printf "%-68s[$GREEN%10s$NC]\n" "$@" "OK"
return 0
else
printf "%-68s[$RED%10s$NC]\n" "$@" "FAILED"
return 1
fi
}
question(){
read -p "$@ (y/n)" yn
case $yn in
[Yy]*) return 0;;
* ) warning "skipping"; return 1;;
esac
}
INAME="SANDY"
# Check if we have the container?
test -f Makefile
report "[$INAME] Makefile found"
if [ $? -eq 0 ]; then
echo "[$INAME] Building WRF ..."
make em_real
fi
test -f WRF-4.4.1-em_real.sif
report "[$INAME] WRF em_real container available" || exit 1
PROJ_PATH=
read -p "[$INAME] Where do you want to place all files? [$PWD] :" PROJ_PATH
if [ -z "$PROJ_PATH" ]; then
PROJ_PATH=$PWD
fi
mkdir -vp $PROJ_PATH/sandy/data/
report "[$INAME] using $PROJ_PATH"
cd $PROJ_PATH/sandy/data/
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-derechodata_20120629.tar.gz | tar zxC .
# curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-snowdata_20160123.tar.gz | tar zxC .
test -d model_data
report "[$INAME] Sandy input data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/container-dtc-nwp-sandydata_20121027.tar.gz | tar zxC .
report "[$INAME] using Sandy input data" || exit 1
fi
test -d obs_data
report "[$INAME] Sandy obs data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/CRTM_v2.3.0.tar.gz | tar zxC .
report "[$INAME] using Sandy obs data" || exit 1
fi
test -d WPS_GEOG
report "[$INAME] Sandy geogrid data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/wps_geog.tar.gz | tar zxC .
report "[$INAME] using Sandy geogrid data" || exit 1
fi
test -d shapefiles
report "[$INAME] Sandy trajectory data"
if [ $? -ne 0 ]; then
curl -SL https://dtcenter.ucar.edu/dfiles/container_nwp_tutorial/tar_files/shapefiles.tar.gz | tar zxC .
report "[$INAME] using Sandy trajectory data" || exit 1
fi
exit 0
# /home/scripts/common/run_python_domain.ksh
# WPS_GEOG
# /home/scripts/common/run_wps.ksh
# /home/scripts/common/run_real.ksh
@@ -16,16 +16,19 @@ bind special directories
 development using includes and libraries from inside the container
 > ./image.sif gfortran -I\$INCLUDE -L\$LIBRARY -o test.x test.f90
 WRF Simulations - Example
 default run files are located in /wrf/run
 executables are located in /wrf/bin
-Default simulation of the container (still requires input)
-1. copies /wrf/run files to the local directory
-2. No local files will be overwritten (e.g. namelists )
-3. Execute real.exe
+Default simulation of the container (still requires input), will do
+1. It will copy /wrf/run files to the local directory (.)
+2. No local files will be overwritten (e.g. namelists), just missing files.
+3. Execute real.exe, run:
 > ./image.sif real.exe
+> ./image.sif wrf.exe
+Using OpenMPI (requires an openmpi version on the host system)
+> mpirun -np 8 ./image.sif real.exe
 Manually copy run files to current directory
 > ./image.sif init
@@ -14,16 +14,17 @@ if [ $# -gt 0 ]; then
 echo "[WRF] no automatic copying"
 fi
 # run cmd line options
-echo "Executing"
+echo "[WRF] Executing"
 exec "$@"
-echo "Finished inside the container."
+echo "[WRF] Finished inside the container."
 else
 # interactive
-echo "Welcome inside an IMGW container!"
-echo "Get help: singularity run-help $SINGULARITY_NAME"
-echo "WRF ($WRF_VERSION, $WRF_BUILD_TARGET)"
-echo "WRF $WRF_BUILD_TARGET -> /wrf/run"
-echo "WRF exe -> /wrf/bin"
-cd /wrf/run
+echo "[WRF] Welcome inside an IMGW container!"
+echo "[WRF] Get help: singularity run-help $SINGULARITY_NAME"
+echo "[WRF] $WRF_VERSION, $WRF_BUILD_TARGET"
+echo "[WRF] $WRF_BUILD_TARGET -> /wrf/run"
+echo "[WRF] exe -> /wrf/bin"
+echo "[WRF] Automatic copy of /wrf/run to current directory: ./$(basename $SINGULARITY_CONTAINER) init"
+echo "[WRF] e.g. run real.exe from outside: ./$(basename $SINGULARITY_CONTAINER) real.exe"
 exec /.singularity.d/actions/shell "$@"
 fi