[Dart-dev] [6213] DART/branches/development/models/mpas_atm/shell_scripts: updates from so-young.
nancy at ucar.edu
Fri May 31 12:49:05 MDT 2013
Revision: 6213
Author: nancy
Date: 2013-05-31 12:49:04 -0600 (Fri, 31 May 2013)
Log Message:
-----------
updates from so-young.
Modified Paths:
--------------
DART/branches/development/models/mpas_atm/shell_scripts/advance_model.csh
DART/branches/development/models/mpas_atm/shell_scripts/advance_model.template.lsf
DART/branches/development/models/mpas_atm/shell_scripts/filter.template.lsf
Added Paths:
-----------
DART/branches/development/models/mpas_atm/shell_scripts/create_filter_ics.csh
DART/branches/development/models/mpas_atm/shell_scripts/driver_mpas_dart.csh
Modified: DART/branches/development/models/mpas_atm/shell_scripts/advance_model.csh
===================================================================
--- DART/branches/development/models/mpas_atm/shell_scripts/advance_model.csh 2013-05-31 15:21:49 UTC (rev 6212)
+++ DART/branches/development/models/mpas_atm/shell_scripts/advance_model.csh 2013-05-31 18:49:04 UTC (rev 6213)
@@ -13,35 +13,37 @@
# after the analysis step is done at each cycle.
#
# This script performs the following:
-# 1. Creates a temporary directory to run a MPAS-A realization (see options)
-# 2. Copies or links the necessary files into the temporary directory
-# 3. Converts DART state vectors to mpas input
+# 1. Creates a temporary directory to run an MPAS-A realization (see options)
+# 2. Copies or links the files necessary for the model run into the temporary directory
+# 3. Converts DART state vectors to the mpas input at the beginning
# 4. Updates an MPAS namelist from a template with new dates
-# 5. Runs MPAS-A model until target time
-# (with either a restart or an output file from the previous cycle).
+# 5. Runs the MPAS-A model in a restart mode until target time is reached
# 7. Checks for incomplete runs
-# 8. Converts mpas output to DART state vectors
-# 9. Saves the analysis file if save_analysis = true
+# 8. Converts mpas output to a DART binary file for the next analysis cycle
+# 9. Saves the mpas analysis file for each member if save_analysis = true
+#10. Saves the mpas forecast file for each member if save_forecast = true
+#11. Saves u on the edges if save_analysis = false and use_u_for_wind = false (for extended forecasts later)
+#12. Saves diagnostic variables at pressure levels (both for analysis and forecast at the cycle)
+# if save_diag_plev = true
+# For this, you need to create an mpas output (not restart) file at the target time
+# since only mpas output files print out those diagnostic fields at pressure levels.
#
-# This script does NOT do the following:
-# 1. Back up the model output file at the end of target time.
-# Users should edit this script to back up the output file (which will be
-# used as a background for the next analysis cycle). Otherwise,
-# the background file will be overwritten in the following cycle.
-# 2. Edit input.nml for model_to_dart and dart_to_model conversion.
-# It is your responsibility to provide the files as assigned in
-# model_analysis_filename, model_to_dart_output_file, and dart_to_model_input_file.
-# Also, 'dart_to_model' expects to have advance_time_present = .true.
-# to generate 'mpas_time' for the current and target time info for the model run.
+# Note: 1. MPAS is run in a restart mode during the cycles, which means
+# both input and output will be restart files.
+# This also means that one cannot delete temp_dir since we need a
+# place to keep the restart file for each member during the cycle.
+# 2. If save_analysis = false and save_forecast = false,
+# the mpas analysis file will be overwritten by
+# the mpas forecast file to be used as a background for the next analysis cycle.
+# 3. dart_to_model expects to have advance_time_present = .true. in input.nml
+# to generate 'mpas_time' for the current and target time info for the forecast run.
+# 4. For the required data to run this script, check the section of 'dependencies'.
#
-# Arguments are (created by 'filter' or 'perfect_model_obs'):
+# Arguments for this script (created by 'filter' or 'perfect_model_obs') are:
# 1) the process number of caller,
# 2) the number of ensemble members/state copies belonging to that process, and
# 3) the name of the control_file for that process.
#
-# Note: For the required data to run this script,
-# check the section of 'dependencies'.
-#
# If this script finishes and the 'control_file' still exists, it is
# an ERROR CONDITION and means one or more of the ensemble members did
# not advance properly. Despite our best attempts to trap on this
@@ -70,31 +72,20 @@
# The run-time directory for the entire experiment is called CENTRALDIR;
set CENTRALDIR = `pwd`
-# Create a clean temporary directory and go there.
-# But we need to keep this temp_dir for an updated mpas_init.nc for the next cycle
-# (to provide an updated time info and for the case of an incremental approach for
-# horizontal winds for each member which also requires individual_members = true below.)
-# So let us keep it false here.
-set delete_temp_dir = false
+# Do you want to save the analysis file?
+set save_analysis = false
-# set this to true if you want to maintain complete individual input/output
-# for each member (to carry through non-updated fields)
-set individual_members = true
+# Do you want to save the forecast at the target time?
+set save_forecast = false
-# next line ensures that the last cycle leaves everything in the temp dirs
-if ( $individual_members == true ) set delete_temp_dir = false
+# Do you want to save the diagnostic variables at pressure levels? (true or false)
+set save_diag_plev = true
-# Is the model running in a restart mode? true or false
-set is_restart = true
-
-# Do you want to save the analysis file?
-set save_analysis = true
-
#
-set REMOVE = '/bin/rm -rf'
-set COPY = '/bin/cp -p'
-set MOVE = '/bin/mv -f'
-set LINK = '/bin/ln -sf'
+set REMOVE = 'rm -rf'
+set COPY = 'cp -p'
+set MOVE = 'mv -f'
+set LINK = 'ln -sf'
unalias cd
unalias ls
@@ -148,10 +139,6 @@
# Create a new temp directory for each member unless requested to keep and it exists already.
set temp_dir = 'advance_temp'${ensemble_member}
- if ( $delete_temp_dir == "true" ) then
- if( -d $temp_dir ) ${REMOVE} $temp_dir || exit 1
- endif
-
if(! -d $temp_dir) mkdir -p $temp_dir || exit 1
cd $temp_dir || exit 1
@@ -164,12 +151,28 @@
${COPY} ${CENTRALDIR}/input.nml . || exit 1
${COPY} ${CENTRALDIR}/namelist.input namelist.input.template || exit 1
- # Get the grid info files
- set fs_grid = `grep config_decomp_file_prefix namelist.input.template | awk '{print $3}' | sed -e "s/'//g"`
- set f_grid = `basename $fs_grid .part.`
+ # Get the grid info files - now for PIO
+ set is_grid_info = `grep config_block_decomp_file_prefix namelist.input.template | wc -l`
+ if( $is_grid_info != 1 ) then
+ echo Cannot find grid info. Stop.
+ exit
+ endif
+ set fs_grid = `grep config_block_decomp_file_prefix namelist.input.template | awk '{print $3}' | sed -e "s/'//g"`
${LINK} ${CENTRALDIR}/MPAS_RUN/${fs_grid}* .
- #${LINK} ${CENTRALDIR}/MPAS_RUN/${f_grid} .
+ # Surface update
+ set if_sfc_update = `grep config_sst_update namelist.input.template | awk '{print $3}'`
+ if($if_sfc_update == .true.) then
+ set fsfc = `grep config_sfc_update_name namelist.input.template | awk '{print $3}' | sed "s/'//g"`
+ if(-e ${CENTRALDIR}/MPAS_RUN/$fsfc) then
+ echo ${LINK} ${CENTRALDIR}/MPAS_RUN/$fsfc .
+ ${LINK} ${CENTRALDIR}/MPAS_RUN/$fsfc .
+ ls -lL $fsfc || exit
+ else
+          echo $fsfc does not exist in ${CENTRALDIR}/MPAS_RUN/. ; exit
+ endif
+ endif
+
# Get the in/out file names for converters and the model
set f1 = `grep dart_to_model_input_file input.nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
set f2 = `grep model_to_dart_output_file input.nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
@@ -181,13 +184,17 @@
set ff = `echo $f3 | cut -d . -f1`
set fn = ${ff}.e${ensemble_member}.nc
- if(! -e ${CENTRALDIR}/$fn) then
- echo ABORT\: ${CENTRALDIR}/$fn does not exist.
- exit
+ if(! -e ${f3}) then
+ #if(! -e ${CENTRALDIR}/$fn) then
+ # echo ABORT\: ${CENTRALDIR}/$fn does not exist.
+ # exit
+ #endif
+ echo ${COPY} ${CENTRALDIR}/$fn ${f3}
+ ${COPY} ${CENTRALDIR}/$fn ${f3} || exit 2
+ # echo ${COPY} ${CENTRALDIR}/$f3 ${f3}
+ # ${COPY} ${CENTRALDIR}/$f3 ${f3} || exit 2
endif
- if(! -e ${f3}) ${COPY} ${CENTRALDIR}/$fn ${f3} || exit 2
-
${MOVE} ${CENTRALDIR}/$input_file $f1 || exit 2
# Overwrite a template file (or prior) with the analysis from filter.
@@ -196,48 +203,55 @@
# The program dart_to_model has created an ascii file named mpas_time.
# Time information is extracted from the file.
- set curr_utc = `head -1 mpas_time | tail -1`
+ set curr_utc = `head -1 mpas_time | tail -1` || exit
set targ_utc = `head -2 mpas_time | tail -1`
set intv_utc = `head -3 mpas_time | tail -1`
${MOVE} out.dart_to_model out.dart_to_model.${curr_utc}
- if ( $is_restart == true ) then
-
- set if_DAcycling = `grep config_do_DAcycling namelist.input.template | wc -l`
- if($if_DAcycling == 0) then
- echo Please add config_do_DAcycling = .true. in &restart
- echo in ${CENTRALDIR}/namelist.input.
- exit -1
- endif
+ set if_DAcycling = `grep config_do_DAcycling namelist.input.template | wc -l`
+ if($if_DAcycling == 0) then
+ echo Please add config_do_DAcycling = .true. in &restart
+ echo in ${CENTRALDIR}/namelist.input.
+ exit -1
+ endif
- set ftype = "restart"
- set finit = "config_"${ftype}"_name"
- set fhead = `basename $f3 .nc`
- set f3new = ${fhead}.${curr_utc}.nc
- set fname = "config_"${ftype}"_name"
- set fintv = "config_"${ftype}"_interval"
+ set ftype = "restart"
+ set finit = "config_"${ftype}"_name"
+ set fhead = `basename $f3 .nc`
+ set f3new = ${fhead}.${curr_utc}.nc
+ set fname = "config_"${ftype}"_name"
+ set fintv = "config_"${ftype}"_interval"
+ set fjunk = `grep config_output_name namelist.input.template | awk '{print $3}' | sed -e "s/'//g"`
+ set fremove = `basename $fjunk .nc`
- ln -sf $f3 ${f3new}
- else
- set ftype = "output"
- set finit = "config_input_name"
- set fname = "config_"${ftype}"_name"
- set fintv = "config_"${ftype}"_interval"
- set filnm = `grep $fname namelist.input.template | awk '{print $3}' | sed -e "s/'//g"`
- set fhead = `basename $filnm .nc`
- set f3new = `basename $f3 .nc`.${curr_utc}.nc
- endif
+ # Rename the restart file for PIO
+ set f3pio = `echo ${f3new} | sed -e 's/:/\./g'`
+ # In a restart mode, the model looks for an IC filename not as specified in
+ # config_restart_name, but as ${finit}.${date_in_pio_format}.nc.
+ # In other words, the actual initial file name should be in a new form
+ # including the date info as in PIO format to run the model.
+ # This is neither straightforward nor consistent with the way we define the model input
+ # file in DART (i.e., model_analysis_filename) to run model_to_dart after the model run.
+ # So we just make a link between the two different filenames here.
+ ${LINK} $f3 ${f3pio}
+
#----------------------------------------------------------------------
# Block 3: advance the model
# Make sure the file name is consistent in the namelist.input.
+ # Mar-21-2013: To save the variables at pressure levels,
+ # we print out output files as well.
#----------------------------------------------------------------------
cat >! script.sed << EOF
/config_start_time/c\
config_start_time = '$curr_utc'
/config_stop_time/c\
config_stop_time ='$targ_utc'
+ /config_run_duration/c\
+ config_run_duration = '$intv_utc'
+ /config_output_interval/c\
+ config_output_interval = '$intv_utc'
/$fintv/c\
$fintv = '$intv_utc'
/$finit/c\
@@ -249,19 +263,12 @@
sed -f script.sed namelist.input.template >! namelist.input
- if ( $is_restart == true ) then
cat >! restart.sed << EOF
/config_do_restart /c\
config_do_restart = .true.
/config_do_DAcycling /c\
config_do_DAcycling = .true.
EOF
- else
- cat >! restart.sed << EOF
- /config_do_restart /c\
- config_do_restart = .false.
-EOF
- endif
${MOVE} namelist.input namelist.input.temp
sed -f restart.sed namelist.input.temp >! namelist.input
@@ -269,9 +276,9 @@
# clean out any old rsl files
if ( -e log.0000.out ) ${REMOVE} log.*
- # run MPAS here
- # mpi run on bluefire
- mpirun.lsf /usr/local/bin/launch ./nhyd_atmos_model.exe || exit 3
+ # mpi run on Yellowstone
+ mpirun.lsf ./nhyd_atmos_model.exe || exit 3
+
# mpi run on Mac OS
#mpiexec -n 4 ./nhyd_atmos_model.exe || exit 3
@@ -279,8 +286,8 @@
ls -lrt > list.${curr_utc}
# Model output at the target time
- set fout = `ls -1 ${fhead}.*.nc | tail -1`
- set date_utc = `echo $fout | awk -F. '{print $(NF-1)}'`
+ set fout = ${fhead}.`echo ${targ_utc} | sed -e 's/:/\./g'`.nc
+ set date_utc = `ncdump -v xtime $fout | tail -2 | head -1 | cut -d";" -f1 | sed -e 's/"//g'`
set targ_grg = `echo $date_utc 0 -g | advance_time`
set targ_day = $targ_grg[1]
set targ_sec = $targ_grg[2]
@@ -293,31 +300,71 @@
endif
#-------------------------------------------------------------------
+ # Back up some fields before cleaning up.
+ #-------------------------------------------------------------------
+ set f3utc = `echo ${curr_utc} 0 | advance_time`
+ set if_u_used = `grep use_u_for_wind input.nml | awk '{print $3}' | cut -d ',' -f1`
+ if ( $if_u_used == .false. ) then
+ ncks -v xtime,u $f3 analysis.uedge.${f3utc}.nc
+ ls -l analysis.uedge.${f3utc}.nc
+ else
+ ncks -v xtime,uReconstructZonal,uReconstructMeridional $f3 analysis.uv.${f3utc}.nc
+ ls -l analysis.uv.${f3utc}.nc
+ endif
+ if ( $save_analysis == true ) then
+ set f3out = mpas_anal.${f3utc}.nc
+ ${MOVE} $f3 ${f3out}
+ endif
+
+ if ( $save_diag_plev == true ) then
+ set f3out = ${fremove}.${curr_utc}.nc
+ set f3anl = `echo ${f3out} | sed -e 's/:/\./g'`
+ set vlist = `ncdump -h ${f3anl} | grep hPa | awk '{print $2}' | cut -d "(" -f1`
+ set vstr = ""
+ foreach v ( $vlist )
+ set vstr = `echo $vstr,$v`
+ end
+ set vars = `echo $vstr | cut -c2-`
+ set var2d = "u10,v10,q2,t2m,th2m,skintemp"
+ set vars = `echo xtime,$vars,$var2d`
+ ncks -v $vars ${f3anl} -O anal_diag_plevs.${f3utc}.nc
+
+ set f3utc = `echo ${targ_utc} 0 | advance_time`
+ set f3out = ${fremove}.${targ_utc}.nc
+ set f3run = `echo ${f3out} | sed -e 's/:/\./g'`
+ if(! -e ${f3run}) then
+ echo Trying to save diagnostic variables at pressure levels,
+ echo but ${f3run} does not exist. Stop.
+ exit
+ endif
+ set var2d = "u10,v10,q2,t2m,th2m,skintemp,rainc,rainnc,hpbl,hfx,qfx,lh"
+ set vars = `echo xtime,$vars,$var2d`
+ ncks -v $vars ${f3run} -O fcst_diag_plevs.${f3utc}.nc
+ endif
+
+ ${REMOVE} ${f3pio} ${fremove}.*.nc
+
+ #-------------------------------------------------------------------
# Block 4: Convert your model output to a DART format ics file,
# then move it back to CENTRALDIR
# We also want to keep $f3 for the next cycle under this
- # temp directory (w/ delete_temp_dir = false).
+ # temp directory
#-------------------------------------------------------------------
- if ( $is_restart == true ) ${REMOVE} ${f3new}
- if ( $save_analysis == true ) ${MOVE} $f3 ${f3new}
-
# Overwrite the analysis file with the forecast at target time (for the next cycle).
- ${MOVE} $fout $f3 || exit 5
+ if ( $save_forecast == true ) then
+ set futc = `echo ${targ_utc} 0 | advance_time`
+ set fsav = mpas_fcst.${futc}.nc
+ echo ${COPY} $fout $fsav
+ ${COPY} $fout $fsav || exit 5
+ endif
+ ${MOVE} $fout $f3 || exit 6
${CENTRALDIR}/model_to_dart >&! out.model_to_dart.${date_utc}
- ${MOVE} $f2 ${CENTRALDIR}/$output_file || exit 4
+ ${MOVE} $f2 ${CENTRALDIR}/$output_file || exit 7
- # FIXME: Do we want to clean up the directory before moving up?
- ${REMOVE} log.*
-
# Change back to original directory.
cd $CENTRALDIR
- # delete the temp directory for each member if desired
- # If all goes well, there should be no need to keep this directory.
- # If you are debugging, you may want to keep this directory.
-
- if ( $delete_temp_dir == true ) ${REMOVE} $temp_dir
echo "Ensemble Member $ensemble_member completed"
# and now repeat the entire process for any other ensemble member that
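
For reference, the converter setup that advance_model.csh assumes in input.nml can be
checked before a cycle is launched. Below is a minimal csh sketch; the grep patterns are
the same ones the script above uses, while the stand-alone check itself (the messages and
the exit code) is only an illustration and not part of this commit.

# Illustrative pre-flight check of input.nml for advance_model.csh.
set nml = input.nml

# Note 3 in the header above: dart_to_model must write the ascii file 'mpas_time',
# which requires advance_time_present = .true. in input.nml.
set adv = `grep advance_time_present $nml | awk '{print $3}' | cut -d ',' -f1`
if ( "$adv" != ".true." ) then
   echo advance_time_present must be .true. in $nml for cycling. Stop.
   exit 1
endif

# The same greps advance_model.csh uses to find the converter file names.
set f1 = `grep dart_to_model_input_file  $nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
set f2 = `grep model_to_dart_output_file $nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
set f3 = `grep model_analysis_filename   $nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
echo dart_to_model reads $f1, model_to_dart writes $f2, model analysis file is $f3
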
Modified: DART/branches/development/models/mpas_atm/shell_scripts/advance_model.template.lsf
===================================================================
--- DART/branches/development/models/mpas_atm/shell_scripts/advance_model.template.lsf 2013-05-31 15:21:49 UTC (rev 6212)
+++ DART/branches/development/models/mpas_atm/shell_scripts/advance_model.template.lsf 2013-05-31 18:49:04 UTC (rev 6213)
@@ -4,7 +4,7 @@
#BSUB -o logs/JOB_NAME.%J.log
#BSUB -P PROJ_NUMBER
#BSUB -W 00:10
-#BSUB -q standby
+#BSUB -q QUEUE
#BSUB -n 32
#BSUB -x
#==================================================================
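
For reference, the QUEUE placeholder introduced above (like the JOB_NAME and PROJ_NUMBER
placeholders already in the template) is meant to be filled by a sed substitution, the
same pattern driver_mpas_dart.csh uses for filter.template.lsf further below. A minimal
csh sketch with illustrative values; the sed file name advance.sed and the output name
advance_model.lsf are assumptions, not part of this commit.

# Illustrative values only.
set job_name    = MPAS_DART_test.1
set PROJ_NUMBER = P00000000
set queue       = small

# Fill the placeholders in the LSF template and submit the job.
cat >! advance.sed << EOF
s#JOB_NAME#${job_name}#g
s#PROJ_NUMBER#${PROJ_NUMBER}#g
s#QUEUE#${queue}#g
EOF

sed -f advance.sed advance_model.template.lsf >! advance_model.lsf
bsub < advance_model.lsf
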
Added: DART/branches/development/models/mpas_atm/shell_scripts/create_filter_ics.csh
===================================================================
--- DART/branches/development/models/mpas_atm/shell_scripts/create_filter_ics.csh (rev 0)
+++ DART/branches/development/models/mpas_atm/shell_scripts/create_filter_ics.csh 2013-05-31 18:49:04 UTC (rev 6213)
@@ -0,0 +1,57 @@
+#!/bin/csh
+##############################################################################################
+# create_filter_ics.csh
+# To create initial ensemble in the DART binary format,
+# run model_to_dart over the mpas_init.nc for each member at the initial cycle time.
+#
+# So-Young Ha (MMM/NCAR)
+##############################################################################################
+# USER SPECIFIED PARAMETERS
+##############################################################################################
+set expname = MPAS_DART_test
+set init_cyc = 2008080112 # initial cycle time
+set f_mpas = mpas_init.2008-08-01_12.00.00.nc # mpas forecast file name
+set dir_fcst = /glade/scratch/syha/MPAS/INIT_PERT # ensemble forecast directory
+set dir_out = /glade/scratch/syha/MPAS_DART/$expname/${init_cyc} # output directory
+set dir_dart = /glade/scratch/syha/DART/branch_dev/models/mpas_atm/work # dart executables
+
+set n = 96 # ensemble size
+set frst = filter_ics # restart_in_file_name in &filter_nml in input.nml
+##############################################################################################
+# END OF USER SPECIFIED PARAMETERS
+##############################################################################################
+
+set infn = `grep model_analysis_filename input.nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
+set outfn = `grep model_to_dart_output_file input.nml | awk '{print $3}' | cut -d ',' -f1 | sed -e "s/'//g"`
+
+foreach f ( $infn $frst model_to_dart.log )
+ if(-e $f) \rm -f $f
+end
+if(! -d $dir_out) mkdir -p $dir_out
+cd $dir_out
+
+if(! -e input.nml) \cp -f $dir_dart/input.nml .
+
+set i = 1
+while ( $i <= $n )
+ set fsrc = $dir_fcst/ENS_$i/$f_mpas
+ if(! -e $fsrc) then
+ echo $fsrc does not exist. Stop.
+ exit
+ endif
+ echo ln -sf $fsrc $infn
+ ln -sf $fsrc $infn
+ $dir_dart/model_to_dart >>&! model_to_dart.e$i.log
+
+ if(! -e $outfn) then
+    echo Error in model_to_dart for ensemble $i. Stop.
+ exit
+ endif
+ set icnum = `echo $i + 10000 | bc | cut -b2-5`
+ cat $outfn >! $dir_out/$frst.${icnum}
+
+ set ff = `echo $infn | cut -d . -f1`
+ ln -sf $fsrc ${ff}.e${i}.nc
+@ i++
+end
+
Property changes on: DART/branches/development/models/mpas_atm/shell_scripts/create_filter_ics.csh
___________________________________________________________________
Added: svn:executable
+ *
Added: svn:mime-type
+ text/plain
Added: svn:keywords
+ Date Rev Author HeadURL Id
Added: svn:eol-style
+ native
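
For reference, the filter_ics.NNNN suffix written by create_filter_ics.csh above comes
from a zero-padding trick, and the same numbering has to match restart_in_file_name in
&filter_nml when single_restart_file_in = .false. (driver_mpas_dart.csh below links the
files back in the same way). A minimal csh fragment, illustrative only:

set i = 7
set icnum = `echo $i + 10000 | bc | cut -b2-5`   # 7 -> 10007 -> "0007"
echo filter_ics.${icnum}                         # prints filter_ics.0007
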
Added: DART/branches/development/models/mpas_atm/shell_scripts/driver_mpas_dart.csh
===================================================================
--- DART/branches/development/models/mpas_atm/shell_scripts/driver_mpas_dart.csh (rev 0)
+++ DART/branches/development/models/mpas_atm/shell_scripts/driver_mpas_dart.csh 2013-05-31 18:49:04 UTC (rev 6213)
@@ -0,0 +1,850 @@
+#!/bin/csh
+##############################################################################################
+#
+# driver_mpas_dart.csh
+#
+# THIS IS A TOP-LEVEL DRIVER SCRIPT FOR CYCLING RUN.
+# BOTH THE ENSEMBLE KALMAN FILTER AND THE MPAS FORECAST ARE RUN IN AN MPI VERSION.
+#
+# This is a sample script for cycling runs in a retrospective case study,
+# and was tested on NCAR IBM Supercomputer (yellowstone) using a "bsub" command.
+#
+# Note:
+# 1. This script does NOT specify all the options available for EnKF data assimilation.
+# For your own complete filter design, you need to edit your template input.nml
+# for the parameters which are not set up in the "Assimilation parameters" section below.
+# You may want to edit at least &filter_nml, &obs_kind_nml, &model_nml, &location_nml
+# and &mpas_vars_nml sections to confirm your filter configuration before running this script.
+#    For adaptive inflation, we only allow the choice of spatially-varying prior inflation here.
+# For more options, check DART/filter/filter.html and edit this script accordingly.
+# 2. An option to back up the output in HPSS storage is supported here.
+#    If the backup fails, all the output will be stored locally.
+#    In that case, check that you have enough disk space before running this script.
+#    Depending on the sav_ options, you may end up storing a huge amount of ensemble data during the cycling.
+# 3. This script assumes that the initial ensemble (both in the DART binary format and the mpas
+# restart format), an mpas restart template file, and obs sequence files for the whole period
+# are available for the cycling run. More descriptions below.
+#
+# Required scripts to run this driver:
+# 1. namelist.input (for mpas) - a namelist template for the mpas model run.
+# 2. advance_model.csh (for mpas) - which makes the mpas forecast run during the cycling.
+# 3. input.nml (for filter) - a namelist template for filter.
+# 4. filter.template.lsf (for an mpi filter run; with async >= 2)
+# 5. advance_model.template.lsf (for an mpi mpas run; using separate nodes for each ensemble member)
+#
+# Input files to run this script:
+# A. RUN_DIR/${mpas_fname}.nc - an mpas template file (for mpas grid fields)
+# B. FG_DIR/${mpas_fname}.e#.nc - initial ensemble in mpas restart format.
+# Can be generated from mpas ensemble forecast valid at the initial cycle.
+# C. FG_DIR/${restart_in} - initial ensemble in dart binary format.
+# Converted from the initial mpas ensemble in B by create_filter_ics.csh.
+# D. OBS_DIR/obs_seq${YYYYMMDDHH} - obs sequence files for each analysis cycle (YYYYMMDDHH) for the
+# whole period (from ${date_ini} to ${date_end}).
+#
+# Written by So-Young Ha (MMM/NCAR) Sep-30-2011
+# Updated and tested on yellowstone by So-Young Ha (MMM/NCAR) Feb-20-2013
+#
+# For any questions or comments, contact: syha at ucar.edu (+1-303-497-2601)
+#
+##############################################################################################
+# USER SPECIFIED PARAMETERS
+##############################################################################################
+# Experiment name and the cycle period
+#--------------------------------------------------------------------------
+set expname = MPAS_DART_test # experiment name
+set init_dir = 2008080112 # initial date directory for filter_ics
+set date_ini = 2008-08-01_12:00:00 # initial cycle for this experiment
+set date_beg = 2008-08-01_12:00:00 # start date to run this script
+set date_end = 2008-08-31_12:00:00 # end date to run this script
+set intv_day = 0 # cycling frequency - assimilation_period_days in input.nml
+set intv_sec = 21600 # cycling frequency - assimilation_period_seconds in input.nml
+#--------------------------------------------------------------------------
+# Assimilation parameters (Logical parameters should be true or false (case-sensitive.))
+#--------------------------------------------------------------------------
+set nens = 80 # ensemble size for ens_size in input.nml
+set cutoff = 0.20 # horizontal location - cutoff in input.nml
+set horiz_dist_only = false # horizontal localization only - if true, edit &location_nml as well.
+set adaptive_inf = true # adaptive_inflation - If true, this script only supports
+ # spatially-varying state space prior inflation.
+ # And you also need to edit inf_sd_initial, inf_damping,
+ # inf_lower_bound, and inf_sd_lower_bound in &filter_nml.
+set single_restart = false # true if all copies read from and written to a single file in filter.
+set use_u_for_wind = false # Use normal velocity ('u') on edges for wind assimilation
+set update_u_from_reconstruct = true # Use reconstructed winds at cell center,
+ # then update normal velocity by the wind increments
+ # at cell center (not by filter).
+set output_ens_obs = true # Print out ensemble observations in the output obs sequence file.
+#--------------------------------------------------------------------------
+# Configuration for MPAS cycling (for advance_model.csh and namelist.input)
+#--------------------------------------------------------------------------
+set sav_analysis = false # true if you want to save the ensemble analysis locally (in ENS_DIR).
+set sav_forecast = false # true if you want to save the ensemble forecast locally (in ENS_DIR).
+set sav_diag_prs = false # true if you want to save diagnostic variables at pressure levels (in ENS_DIR).
+set sst_update = true # true if config_sst_update = true in the model simulation
+set sst_interval = 01_00:00:00 # sst update interval when sst_update = true
+set sst_fname = sfc_update.nc # sst input file name when sst_update = true
+#--------------------------------------------------------------------------
+# Directories
+#--------------------------------------------------------------------------
+set DART_DIR = /glade/scratch/syha/DART/branch_dev/models/mpas_atm/work # where dart executables exist
+set MPAS_DIR = /glade/scratch/syha/MPAS/run # where all the aux files exist to run mpas forecast
+set RUN_DIR = /glade/scratch/syha/MPAS_DART/$expname # top-level working directory
+set FG_DIR = $RUN_DIR/$init_dir # where filter_ics exists for initial cycle
+set OBS_DIR = /glade/scratch/syha/OBS_SEQ/data # where all obs sequence files exist
+set ENS_DIR = advance_temp # where the background forecast exists for each member
+#--------------------------------------------------------------------------
+# File naming convention (for input.nml)
+#--------------------------------------------------------------------------
+set restart_in = filter_ics
+set restart_out = filter_restart
+set infl_in = prior_inflate_ics
+set infl_out = prior_inflate_restart
+set obs_seq_in = obs_seq.out
+set obs_seq_out = obs_seq.final
+set mpas_fname = mpas_init # both for the analysis and the grid files (without .nc)
+#--------------------------------------------------------------------------
+# BSUB setup on NCAR yellowstone (only if $bsub_in_ibm = yes)
+#--------------------------------------------------------------------------
+set bsub_in_ibm = yes # run on yellowstone? yes or no.
+set PROJ_NUMBER = P64000101 # Account key to submit filter and mpas jobs on yellowstone
+set time_filter = 01:10 # wall clock time for mpi filter runs
+set queue = small # queue for the batch job
+
+set hpss_save = yes # Backup in HPSS? yes or no. If yes, edit below.
+set hsidir = MPAS/CYCLE_TEST/$expname
+set HSICMD = 'hsi put -P'
+####################################################################################
+# END OF USER SPECIFIED PARAMETERS
+####################################################################################
+
+set REMOVE = '/bin/rm -rf'
+set COPY = 'cp -pf'
+set MOVE = 'mv -f'
+set LINK = 'ln -sf'
+unalias cd
+unalias ls
+
+if(! -e $RUN_DIR) mkdir -p $RUN_DIR
+cd $RUN_DIR
+
+#------------------------------------------
+# Check if we have all the necessary files.
+#------------------------------------------
+if(! -r input.nml) ${COPY} $DART_DIR/input.nml .
+if(! -r namelist.input) ${COPY} $MPAS_DIR/namelist.input .
+
+if ( ! -e advance_model.csh) then
+ echo ${COPY} $DART_DIR/../shell_scripts/advance_model.csh .
+ ${COPY} $DART_DIR/../shell_scripts/advance_model.csh .
+ if( ! $status == 0 ) then
+     echo ABORT\: We cannot find required script advance_model.csh.
+     exit -1
+ endif
+endif
+
+# Update advance_model.csh
+cat >! config_restart.sed << EOF
+/set save_analysis/c\
+set save_analysis = ${sav_analysis}
+/set save_forecast /c\
+set save_forecast = ${sav_forecast}
+/set save_diag_plev /c\
+set save_diag_plev = ${sav_diag_prs}
+EOF
+if(-e advance_new.csh) ${REMOVE} advance_new.csh
+sed -f config_restart.sed advance_model.csh >! advance_new.csh
+${MOVE} advance_new.csh advance_model.csh
+chmod +x advance_model.csh
+
+# For an initial cycle, we already linked the mpas template files
+# thru create_filter_ics.csh. But if you run this script in the middle
+# of the cycling, you need to make sure that a correct background
+# forecast is linked in each ensemble member directory.
+
+set ie = 1
+while ( $ie <= $nens )
+if($date_ini == $date_beg) then
+ if(! -e ${FG_DIR}/${mpas_fname}.e${ie}.nc) then
+ echo We cannot find an initial ensemble for member ${ie}. Stop.
+ exit -1
+ endif
+ ${LINK} ${FG_DIR}/${mpas_fname}.e${ie}.nc
+else
+ set mpas_temp = ${ENS_DIR}${ie}/${mpas_fname}.nc
+ if(! -e $mpas_temp) then
+ echo We cannot find ${mpas_temp} to start with. Stop.
+ exit -1
+ endif
+ set time_ie = `ncdump -v xtime ${mpas_temp} | tail -2 | head -1 | awk '{print $1}' | cut -c2-`
+ if($time_ie != $date_beg) then
+ echo $mpas_temp should have the time $date_beg, but has $time_ie. Stop.
+ exit -1
+ endif
+endif
+@ ie++
+end
+
+if(! -e ${mpas_fname}.nc) ${COPY} ${FG_DIR}/${mpas_fname}.e1.nc ${mpas_fname}.nc
+
+
+foreach fn ( filter advance_time convertdate restart_file_tool dart_to_model model_to_dart )
+ if ( ! -x $fn ) then
+ echo ${LINK} $DART_DIR/$fn .
+ ${LINK} $DART_DIR/$fn .
+ if( ! $status == 0 ) then
+ echo ABORT\: We cannot find required executable dependency $fn.
+ exit -1
+ endif
+ endif
+end
+
+if($bsub_in_ibm == yes) then
+ foreach fn ( filter.template.lsf advance_model.template.lsf )
+ if ( ! -r $fn ) then
+ echo ${COPY} $DART_DIR/../shell_scripts/$fn .
+ ${COPY} $DART_DIR/../shell_scripts/$fn .
+ if( ! $status == 0 ) then
+ echo ABORT\: We cannot find required readable dependency $fn.
+ exit -1
+ endif
+ endif
+ end
+endif
+
+# Preparation for the model run
+if ( -e MPAS_RUN ) ${REMOVE} MPAS_RUN
+if (! -d $MPAS_DIR ) then
+ echo $MPAS_DIR does not exist. Stop.
+ exit -1
+endif
+${LINK} $MPAS_DIR MPAS_RUN
+
+if ( $sst_update == true ) then
+ cat >! namelist.sed << EOF
+ /config_sfc_update_interval /c\
+ config_sfc_update_interval = '${sst_interval}'
+ /config_sfc_update_name /c\
+ config_sfc_update_name = '${sst_fname}'
+/config_sst_update /c\
+ config_sst_update = .true.
+EOF
+
+sed -f namelist.sed namelist.input >! namelist.input.sst_update
+if(! -z namelist.input.sst_update) then
+ ${MOVE} namelist.input.sst_update namelist.input
+else
+ echo ABORT\: Failed in updating namelist.input for config_sst_update.
+ exit -1
+endif
+${LINK} ${MPAS_DIR}/${sst_fname} . || exit 1
+
+else
+ echo NO SST_UPDATE...
+endif
+
+if ( ! -d logs ) mkdir logs # to print out log files
+${COPY} input.nml input.nml.template # Need to update input.nml with user-specified options
+
+#------------------------------------------
+# Time info
+#------------------------------------------
+set greg_ini = `echo $date_ini 0 -g | advance_time`
+set greg_beg = `echo $date_beg 0 -g | advance_time`
+set greg_end = `echo $date_end 0 -g | advance_time`
+set intv_hr = `expr $intv_sec \/ 3600`
+set intv_dh = `expr $intv_day \* 24`
+ @ intv_hr += $intv_dh
+set diff_day = `expr $greg_end[1] \- $greg_beg[1]`
+set diff_sec = `expr $greg_end[2] \- $greg_beg[2]`
+set diff_tot = `expr $diff_day \* 86400 \+ $diff_sec`
+set n_cycles = `expr $diff_tot \/ $intv_sec \+ 1`
+echo Total of $n_cycles cycles from $date_beg to $date_end will be run every $intv_hr hr.
+if($n_cycles < 0) then
+ echo Cannot figure out how many cycles to run. Check the time setup.
+ exit
+endif
+
+echo Running at $RUN_DIR
+echo " "
+
+set icyc = 1
+if($date_beg != $date_ini) then
+ set init_day = `expr $greg_beg[1] \- $greg_ini[1]`
+ set init_sec = `expr $greg_beg[2] \- $greg_ini[2]`
+ set init_dif = `expr $init_day \* 86400 \+ $init_sec`
+ set icyc = `expr $init_dif \/ $intv_sec \+ 1`
+else
+ if( -d advance_temp1) then
+ echo We start new experiment now.
+ echo Cleaning up advance_temp directories first...
+ \rm -fR advance_temp*
+ endif
+endif
+set ncyc = `expr $icyc \+ $n_cycles \- 1`
+
+
+#--------------------------------------------------------
+# Cycling gets started
+#--------------------------------------------------------
+set time_ini = `echo $date_ini 0 | advance_time` #YYYYMMDDHH
+set time_anl = `echo $date_beg 0 | advance_time` #YYYYMMDDHH
+set time_end = `echo $date_end 0 | advance_time` #YYYYMMDDHH
+
+set first_cycle = $icyc
+while ( $icyc <= $ncyc )
+
+ set time_pre = `echo $time_anl -$intv_hr | advance_time` #YYYYMMDDHH
+ set time_nxt = `echo $time_anl +$intv_hr | advance_time` #YYYYMMDDHH
+ set greg_obs = `echo $time_anl 0 -g | advance_time`
+ set greg_obs_days = $greg_obs[1]
+ set greg_obs_secs = $greg_obs[2]
+ echo Cycle $icyc at $time_anl\: ${greg_obs_days}_${greg_obs_secs}
+
+ #------------------------------------------------------
+ # 1. Namelist setup
+ #------------------------------------------------------
+ cat >! script.sed << EOF
+ /ens_size /c\
+ ens_size = $nens,
+ /cutoff /c\
+ cutoff = $cutoff,
+ /horiz_dist_only/c\
+ horiz_dist_only = .${horiz_dist_only}.,
+ /start_from_restart/c\
+ start_from_restart = .true.,
+ /init_time_days/c\
+ init_time_days = -1,
+ /init_time_seconds/c\
+ init_time_seconds = -1,
+ /first_obs_days/c\
+ first_obs_days = -1,
+ /first_obs_seconds/c\
+ first_obs_seconds = -1,
+ /last_obs_days/c\
+ last_obs_days = -1,
+ /last_obs_seconds/c\
+ last_obs_seconds = -1,
+ /model_analysis_filename/c\
+ model_analysis_filename = '${mpas_fname}.nc',
+ /grid_definition_filename/c\
+ grid_definition_filename = '${mpas_fname}.nc',
+ /output_state_vector /c\
+ output_state_vector = .false.,
+ /assimilation_period_days/c\
+ assimilation_period_days = $intv_day,
+ /assimilation_period_seconds/c\
+ assimilation_period_seconds = $intv_sec,
+ /single_restart_file_in/c\
+ single_restart_file_in = .$single_restart.,
+ /single_restart_file_out/c\
+ single_restart_file_out = .$single_restart.,
+ /advance_time_present/c\
+ advance_time_present = .true.,
+EOF
+
+ if( $adaptive_inf == true ) then # For a spatially-varying prior inflation.
+
+ cat >! script1.sed << EOF
+ /inf_flavor /c\
+ inf_flavor = 2, 0,
+EOF
+
+ if($icyc == 1) then
+ cat >! script2.sed << EOF
+ /inf_initial_from_restart/c\
+ inf_initial_from_restart = .false., .false.,
+ /inf_sd_initial_from_restart/c\
+ inf_sd_initial_from_restart = .false., .false.,
+EOF
+ else
+ cat >! script2.sed << EOF
+ /inf_initial_from_restart/c\
+ inf_initial_from_restart = .true., .true.,
+ /inf_sd_initial_from_restart/c\
+ inf_sd_initial_from_restart = .true., .true.,
+EOF
+ endif
+
+ cat script1.sed >> script.sed
+ cat script2.sed >> script.sed
+
+ else # turn off the adaptive inflation in prior
+
+ cat >! script1.sed << EOF
+ /inf_flavor /c\
+ inf_flavor = 0, 0,
+EOF
+ cat script1.sed >> script.sed
+
+ endif
+
+ if( $use_u_for_wind == true ) then
+ set is_use_u_there = `grep use_u_for_wind input.nml.template | wc -l`
+ set is_u_there = `grep KIND_EDGE_NORMAL_SPEED input.nml.template | wc -l`
+
+ if($is_use_u_there == 0) then
+ echo No use_u_for_wind in your input.nml.template.
+ exit -1
+ endif
+ if($is_u_there == 0) then
+ echo No KIND_EDGE_NORMAL_SPEED in your input.nml.template.
+ exit -1
+ endif
+
+ cat >! script3.sed << EOF
+ /use_u_for_wind/c\
+ use_u_for_wind = .true.,
+EOF
+
+ cat script3.sed >> script.sed
+ endif
+
+ set is_update_u_there = `grep update_u_from_reconstruct input.nml.template | wc -l`
+
+ if($is_update_u_there == 0) then
+ echo No update_u_from_reconstruct in your input.nml.template.
+ exit -1
+ endif
+
+ cat >! scriptu.sed << EOF
+ /update_u_from_reconstruct/c\
+ update_u_from_reconstruct = .${update_u_from_reconstruct}.,
+EOF
+ cat scriptu.sed >> script.sed
+
+ set nobs = 0
+ if( $output_ens_obs == true ) set nobs = $nens
+ cat >! script4.sed << EOF
+ /num_output_obs_members/c\
+ num_output_obs_members = $nobs,
+EOF
+ cat script4.sed >> script.sed
+
+ ${REMOVE} input.nml
+ sed -f script.sed input.nml.template >! input.nml || exit 2
+
+ #------------------------------------------------------
+ # 2. Link to a restart file to get filter started
+ # (assuming start_from_restart = .true. in input.nml)
+ #------------------------------------------------------
+
+ if($icyc == 1) then
+ set dir_rst = $FG_DIR
+ else
+ set dir_rst = $RUN_DIR/${time_anl}
+ endif
+ set fn_rst = $dir_rst/${restart_in}
+
+ if($single_restart == true) then
+ if(! -e $fn_rst) then
+ echo $fn_rst does not exist. Stop.
+ exit
+ else
+ ${LINK} ${fn_rst} ${restart_in}
+ endif
+ else
+ set i = 1
+ while ( $i <= $nens )
+ set icnum = `echo $i + 10000 | bc | cut -b2-5`
+ if(! -e ${fn_rst}.${icnum}) then
+ echo ${fn_rst}.${icnum} does not exist. Stop.
+ exit
+ else
+ ${LINK} ${fn_rst}.${icnum} ${restart_in}.${icnum}
+ endif
+ @ i++
+ end
+ endif
+
+ if( $adaptive_inf == true && $icyc > 1 ) then
+ if(! -e $RUN_DIR/${time_pre}/${infl_out}) then
+ echo $RUN_DIR/${time_pre}/${infl_out} does not exist. Stop.
+ exit
+ endif
+ ${LINK} $RUN_DIR/${time_pre}/${infl_out} ${infl_in}
+ endif
+
+ #------------------------------------------------------
+ # 3. Obs sequence for this analysis cycle - one obs time at each analysis cycle
+ #------------------------------------------------------
+ #set fn_obs = $OBS_DIR/${time_anl}/obs_seq.out
+ set fn_obs = $OBS_DIR/obs_seq${time_anl}
+ if(! -e $fn_obs) then
+ echo $fn_obs does not exist. Stop.
+ exit
+ endif
+ ${LINK} $fn_obs ${obs_seq_in}
+
+ #------------------------------------------------------
+ # 4. Run filter
+ #------------------------------------------------------
+ set job_name = ${expname}.${icyc}
+ echo Running filter: $job_name
+
+ if($bsub_in_ibm == yes) then
+
+ cat >! filter.sed << EOF
+ s#JOB_NAME#${job_name}#g
+ s#PROJ_NUMBER#${PROJ_NUMBER}#g
+ s#NENS#${nens}#g
+ s#JOB_TIME#${time_filter}#g
+ s#QUEUE#${queue}#g
+EOF
+
+ sed -f filter.sed filter.template.lsf >! filter.lsf
+ bsub < filter.lsf
+
+ # Wait until the job is finished.
+ set is_there = `bjobs -w | grep $job_name | wc -l`
+ while ( $is_there != 0 )
+ sleep 60
+ set is_there = `bjobs -w | grep $job_name | wc -l`
+ end
+
+ else
+
+ echo `date +%s` >&! filter_started
+ ./filter >! filter.log
+ if ( -e obs_seq.final ) touch filter_done
+
+ endif
+
+ # Check errors in filter.
+ if ( -e filter_started && ! -e filter_done ) then
@@ Diff output truncated at 40000 characters. @@
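
For reference, the time arithmetic in these scripts relies on the DART advance_time
utility in two forms: converting a date string to Gregorian days and seconds with the -g
flag, and stepping a YYYYMMDDHH time forward by the cycling interval in hours. A minimal
csh fragment; the dates are illustrative and ./advance_time is assumed to be linked into
the run directory, as driver_mpas_dart.csh does.

set greg = `echo 2008-08-01_12:00:00 0 -g | ./advance_time`   # prints "<days> <seconds>"
set greg_days = $greg[1]
set greg_secs = $greg[2]
set time_nxt  = `echo 2008080112 +6 | ./advance_time`          # 6 hours later: 2008080118
echo $greg_days $greg_secs $time_nxt
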