Commit 24ab1e48 authored by lkugler

rename scripts folder

parent 164914ad
Showing 94 additions and 6 deletions
@@ -5,3 +5,6 @@ dask-worker-space/
 DART_WRF.egg*
 build*
 ._*
+.vscode
+dist/
+experimental/
\ No newline at end of file
@@ -79,11 +79,11 @@ vsc.archive_base = '/gpfs/data/fs71386/lkugler/sim_archive/'
 vsc.srcdir = '/gpfs/data/fs71386/lkugler/compile/WRF/WRF-4.3/run'
 vsc.dart_srcdir = '/gpfs/data/fs71386/lkugler/compile/DART/DART-9.11.9/models/wrf/work'
 vsc.rttov_srcdir = '/gpfs/data/fs71386/lkugler/compile/RTTOV13/rtcoef_rttov13/'
-vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts/'
+vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/dartwrf/'
 # templates/run scripts
 vsc.namelist = vsc.scriptsdir+'/../templates/namelist.input'
-vsc.run_WRF = '/home/fs71386/lkugler/DART-WRF/scripts/run_ens.vsc.sh'
+vsc.run_WRF = '/home/fs71386/lkugler/DART-WRF/dartwrf/run_ens.vsc.sh'
 vsc.slurm_cfg = {"account": "p71386", "partition": "mem_0384", "qos": "p71386_0384",
                  "nodes": "1", "ntasks": "1", "ntasks-per-node": "48", "ntasks-per-core": "1",
...
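Both changed lines follow from the folder rename: vsc.namelist is derived from vsc.scriptsdir and therefore picks up the new location automatically, whereas vsc.run_WRF is a hard-coded path and has to be edited together with vsc.scriptsdir. A minimal sketch of that pattern, assuming a plain namespace object rather than the project's actual config class:

# Minimal sketch, assuming a plain namespace object; not the actual DART-WRF config class.
from types import SimpleNamespace

vsc = SimpleNamespace()
vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/dartwrf/'                # renamed from .../scripts/
vsc.namelist = vsc.scriptsdir + '/../templates/namelist.input'            # derived path: follows the rename automatically
vsc.run_WRF = '/home/fs71386/lkugler/DART-WRF/dartwrf/run_ens.vsc.sh'     # literal path: must be updated by hand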
File moved
File moved
File moved
File moved
File moved
File moved
@@ -569,7 +569,7 @@ class ObsSeq(object):
 if __name__ == "__main__":
     # for testing purposes
-    obs = ObsSeqOut(cluster.scriptsdir + "/../tests/obs_seq.orig.out")
+    obs = ObsSeq(cluster.scriptsdir + "/../tests/obs_seq.orig.out")
     # select a subset (lat-lon)
     obs.superob(window_km=50)
...
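For reference, the renamed class can be exercised the same way outside this test block. A hypothetical standalone sketch, in which the import path is an assumption about the renamed package layout and only the two calls visible in this hunk are used:

# Hypothetical usage sketch; only ObsSeq(...) and superob(window_km=...) appear in this hunk,
# and the import path below is an assumption, not the repository's documented layout.
from dartwrf.obs_seq import ObsSeq           # assumed module path after the folder rename

obs = ObsSeq("tests/obs_seq.orig.out")       # parse a DART obs_seq file (path shortened for the example)
obs.superob(window_km=50)                    # aggregate observations in ~50 km superobservation windows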
File moved
File moved
File moved
File moved
export SLURM_STEP_GRES=none
## $SLURM_ARRAY_TASK_ID
echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
EXPNAME="OSSE_v1.10_test"
NAMELIST="namelist.input"
USERDIR=/jetfs/home/lkugler/
DATADIR=$USERDIR
SRC_DIR=/jetfs/home/lkugler/compile/WRF/WRF-v4.2/run/
IDEAL_EXE=/jetfs/home/lkugler/compile/bin/ideal.exe
WRF_EXE=/jetfs/home/lkugler/compile/bin/wrf-v4.2_v1.10.dmpar.exe
# VSC support: be careful with correct pinning!
pinning=(0-11 12-23 24-35 36-47)
for ((n=1; n<=4; n++))
do
IENS="$(((($SLURM_ARRAY_TASK_ID - 1) * 4) + $n))"  # ensemble member number, computed as in the srun variant below
RUNDIR=$USERDIR/run_WRF/$EXPNAME/$IENS
cd $RUNDIR
rm -r wrfout_d01_*
echo "mpirun -genv I_MPI_PIN_PROCESSOR_LIST=${pinning[$n-1]} -np 12 ./wrf.exe >/dev/null 2>&1"
mpirun -genv I_MPI_PIN_PROCESSOR_LIST=${pinning[$n-1]} -np 12 ./wrf.exe >/dev/null 2>&1 &
cd ../
done
wait

module purge
module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
export I_MPI_DEBUG=4
export OMP_NUM_THREADS=1
mem_per_task=10G
cpu_per_task=12
echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
EXPNAME=<exp.expname>
MAINDIR=<cluster.wrf_rundir_base>
pinning=(0-11 12-23 24-35 36-47)
mytasks=4
for n in `seq 1 $mytasks`
do
IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))" # ensemble number (5,6,7,8 for job array element 2)
RUNDIR=$MAINDIR/$EXPNAME/$IENS
echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
cd $RUNDIR
rm -rf rsl.out.0*
echo "srun --mem=$mem_per_task --cpus-per-task=$cpu_per_task --cpu_bind=map_cpu:${pinning[$n-1]} --ntasks=1 ./wrf.exe &"
srun --mem=$mem_per_task --cpus-per-task=$cpu_per_task --cpu_bind=map_cpu:${pinning[$n-1]} --ntasks=1 ./wrf.exe &
cd ../
done
wait
# error checking
for ((n=1; n<=4; n++))
do
IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
RUNDIR=$MAINDIR/$EXPNAME/$IENS
cd $RUNDIR
line=`tail -n 1 rsl.out.0000`
if [[ $line == *"SUCCESS COMPLETE WRF"* ]];
then
echo $RUNDIR 'SUCCESS COMPLETE WRF'
else
echo $RUNDIR $line
exit 1
fi
done

module purge
module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
## $SLURM_ARRAY_TASK_ID
echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
EXPNAME=<exp.expname>
MAINDIR=<cluster.wrf_rundir_base>
IENS=$SLURM_ARRAY_TASK_ID
RUNDIR=$MAINDIR/$EXPNAME/$IENS
echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
cd $RUNDIR
rm -rf rsl.out.0*
mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-19 -np 20 ./wrf.exe
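The run scripts above contain <exp.expname> and <cluster.wrf_rundir_base> placeholders, i.e. they are templates that get filled in on the Python side before submission. A minimal sketch of such a substitution step, assuming plain string replacement; the function name and argument objects are illustrative, not the project's actual helpers:

# Minimal sketch, assuming plain string replacement; names are illustrative only.
def fill_run_script_template(template_path, output_path, exp, cluster):
    """Replace the <exp.*> and <cluster.*> placeholders in a run-script template."""
    with open(template_path) as f:
        text = f.read()
    text = text.replace('<exp.expname>', exp.expname)
    text = text.replace('<cluster.wrf_rundir_base>', cluster.wrf_rundir_base)
    with open(output_path, 'w') as f:
        f.write(text)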
File moved
File moved