[Dart-dev] [3376] DART/trunk/models/MITgcm_ocean/shell_scripts: job.simple.csh is interactive - launches runme_filter (async=4)

nancy at ucar.edu
Thu May 22 09:51:29 MDT 2008


Added: DART/trunk/models/MITgcm_ocean/shell_scripts/MakeInitialEnsemble.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/MakeInitialEnsemble.csh	                        (rev 0)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/MakeInitialEnsemble.csh	2008-05-22 15:51:29 UTC (rev 3376)
@@ -0,0 +1,69 @@
+#!/bin/tcsh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2008, Data Assimilation Research Section, 
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# $Id: runmodel_1x 2799 2007-04-04 23:17:51Z thoar $
+#
+#=============================================================================
+# Background: we ran the model for 14 days with a 900-second timestep,
+# starting 1996 01 01 ??Z.
+#
+# I wanted 20 ensemble members.
+#
+# Snapshot files were written every 12 hours, yielding 29 sets of snapshots.
+# The first snapshot becomes ensemble member 1, the second becomes ensemble
+# member 2, and so on; the last snapshots simply go unused. This may not be
+# the greatest idea - I don't know how much spread we will have to start.
+# There must be SOME ... it depends on the nonlinearity of the model.
+#
+# I modified trans_pv_sv.f90 to ignore the timestamps 
+#
+# Repeatedly call trans_pv_sv to turn each snapshot into a 
+# DART initial condition. The first line (for ASCII files) or the
+# first 16 bytes (4+4+4+4 - for binary files) contain the timestamp.
+# The timestamp from the first member must be saved and reused for all
+# subsequent 'members'. Simply 'cat' them together at the end.
+# There is no problem with having more ensemble members in a DART
+# initial condition file ... but there is a big problem if you don't have
+# enough ...
+#
+# TJH - 21 May 2008
+
+# trans_pv_sv needs three namelist files:  data, data.cal and input.nml
+
+set DARTROOT = /fs/image/home/thoar/SVN/DART/models/MITgcm_ocean
+cp -p ${DARTROOT}/inputs/data      .
+cp -p ${DARTROOT}/inputs/data.cal  .
+cp -p ${DARTROOT}/work/input.nml   .
+
+@ memcount = 0
+
+foreach FILE ( T.*.data )
+   @ memcount = $memcount + 1
+
+   # for a snapshot named T.<timestep>.data, :r strips the .data
+   # extension and :e then extracts the <timestep> part
+   set FILEBASE = $FILE:r
+   set TIMESTEP = $FILEBASE:e
+   echo "Converting snapshot timestep $TIMESTEP ..."
+
+   echo $TIMESTEP | ${DARTROOT}/work/trans_pv_sv
+
+   # placeholder branches for the timestamp bookkeeping described above:
+   # save the timestamp from member 1, reuse it for later members
+   if ( $memcount == 1) then
+   #  head -1 assim_model_state_ud >! timestamp
+   else
+   #  set nlines = `wc -l assim_model_state_ud`
+   endif
+
+   if ( $memcount < 10 ) then
+      set OFNAME = ens_mem_00$memcount
+   else if ( $memcount < 100 ) then
+      set OFNAME = ens_mem_0$memcount
+   else
+      set OFNAME = ens_mem_$memcount
+   endif
+
+   mv assim_model_state_ud $OFNAME
+end


Property changes on: DART/trunk/models/MITgcm_ocean/shell_scripts/MakeInitialEnsemble.csh
___________________________________________________________________
Name: svn:executable
   + *
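
The header comment of MakeInitialEnsemble.csh says to 'cat' the member files together at the end, but the loop as committed stops after renaming each member. A minimal sketch of that final step, assuming the combined file follows the DART convention of being named 'filter_ics' (the name job.simple.csh copies below):

   # concatenate all members, in lexicographic (member) order, into one
   # DART initial-conditions file; assumes the timestamps in the member
   # files have already been made consistent
   cat ens_mem_* >! filter_ics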

Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh	2008-05-22 15:46:48 UTC (rev 3375)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh	2008-05-22 15:51:29 UTC (rev 3376)
@@ -1,4 +1,4 @@
-#!/bin/csh
+#!/bin/tcsh
 #
 # Data Assimilation Research Testbed -- DART
 # Copyright 2004-2007, Data Assimilation Research Section
@@ -11,13 +11,11 @@
 # $Revision$
 # $Date$
 
-# Standard script for use in assimilation applications
+# Script for use in assimilation applications
 # where the model advance is executed as a separate process.
-# Can be used with most low-order models and the bgrid model which
-# can be advanced using the integrate_model executable.
 
 # This script copies the necessary files into the temporary directory
-# and then executes the fortran program integrate_model.
+# and then executes the model
 
 # Arguments are the 
 # 1) process number of caller, 
@@ -27,24 +25,62 @@
 set num_states = $2
 set control_file = $3
 
+echo "process      is $process"
+echo "num_states   is $num_states"
+echo "control_file is $control_file"
+
 # Get unique name for temporary working directory for this process's stuff
 set temp_dir = 'advance_temp'${process}
+echo "temp_dir is $temp_dir"
 
 # Create a clean temporary directory and go there
 \rm -rf  $temp_dir
 mkdir -p $temp_dir
 cd       $temp_dir
 
-# Get files needed to run the ocean model
-cp -p ../eedata ../topog.bin ../theta.bin ../salt.bin ../SST.bin ../SSS.bin .  || exit 1
+# Copy the namelist files - these are small, so we really copy them
+foreach FILE ( data data.cal data.exf data.kpp \
+               data.obcs data.pkg eedata )
+   cp -pv ../inputs/$FILE .
+end
 
-# Get namelist files controlling run-time behavior
-cp -p ../data ../data.cal ../data.exf ../data.kpp \
-                         ../data.obcs ../data.pkg . || exit 2
+# Copy the files used by data&PARM05 - input datasets.
+# These get overwritten ... so maybe we don't actually copy them.
+foreach FILE ( bathymetry.bin gom_H_199601.bin gom_S_199601.bin \
+             gom_T_199601.bin gom_U_199601.bin gom_V_199601.bin )
+#   cp -pv ../inputs/$FILE .
+end
 
-# Get files needed to run DART input.nml ... is it used by trans_sv_pv ...?
-cp ../input.nml . || exit 3
+# link the files used by data.exf&EXF_NML_02 - external forcings
+foreach FILE ( lev05_monthly_sss_relax.bin \
+               lev05_monthly_sst_relax.bin \
+               run-off.bin_1x1             \
+               ncep_air_19960101.bin       \
+               ncep_dlwrf_19960101.bin     \
+               ncep_dswrf_19960101.bin     \
+               ncep_nswrs_19960101.bin     \
+               ncep_prate_19960101.bin     \
+               ncep_shum_19960101.bin      \
+               ncep_uwnd_19960101.bin      \
+               ncep_vwnd_19960101.bin      )
+   ln -sf ../inputs/$FILE .
+end
 
+# link the files used by data.obcs&OBCS_PARM01 - open boundaries
+foreach FILE ( Rs_SobcsE_52_01_nPx1.bin    Rs_SobcsN_52_01_nPy1.bin \
+               Rs_TobcsE_52_01_nPx1.bin    Rs_TobcsN_52_01_nPy1.bin \
+               Rs_UobcsE_52_01_nPx1_c1.bin Rs_UobcsN_52_01_nPy1.bin \
+               Rs_VobcsE_52_01_nPx1.bin    Rs_VobcsN_52_01_nPy1_c1.bin)
+   ln -sf ../inputs/$FILE .
+end
+
+
+# Get files needed to run DART
+cp ../input.nml .
+
+echo 'listing now that the table has been set ...'
+ls -l
+
 # Loop through each state
 set state_copy = 1
 set ensemble_member_line = 1
@@ -66,12 +102,15 @@
    # Eta.YYYYMMDD.HHMMSS.[data,meta], and 
    # data.cal.new  ... which contains the appropriate startdate_1, startdate_2
    # so data&PARM05 will specify the input data files.
-   mv ../$input_file assim_model_state_ic
 
+   mv -v ../$input_file assim_model_state_ic
+
    ../trans_sv_pv
 
    # Update the MIT namelist output ... 
    # and rename the input files to those defined in the data&PARM05 namelist.
+   # FIXME - somehow the last ensemble member needs to copy the 
+   # data.cal.new back to the CENTRALDIR ... I think
 
    mv data.cal.new data.cal
 
@@ -99,25 +138,28 @@
    set FNAME = `echo  $FNAME | sed -e "s#'##g"`
    mv -v V.*.*.data $FNAME
 
-   set FNAME = `grep -i hydrogSaltFile data | sed -e "s#=##"`
-   set FNAME = `echo  $FNAME | sed -e "s#hydrogSaltFile##"`
+   set FNAME = `grep -i pSurfInitFile data | sed -e "s#=##"`
+   set FNAME = `echo  $FNAME | sed -e "s#pSurfInitFile##"`
    set FNAME = `echo  $FNAME | sed -e "s#,##g"`
    set FNAME = `echo  $FNAME | sed -e "s#'##g"`
    mv -v Eta.*.*.data $FNAME
 
+# set pattern="s/.*'\(.*\)'.*/\1/"
+# set zfilename=`echo $zoneline |  sed -e $pattern`
+#
 #  set  hydrogSaltFile=`sed -n -e  's/hydrogSaltFile=.\(.*\).,/\1/p' data`
 #  set hydrogThetaFile=`sed -n -e 's/hydrogThetaFile=.\(.*\).,/\1/p' data`
 #  set    uVelInitFile=`sed -n -e    's/uVelInitFile=.\(.*\).,/\1/p' data`
 #  set    vVelInitFile=`sed -n -e    's/vVelInitFile=.\(.*\).,/\1/p' data`
-#  set   thetaClimFile=`sed -n -e   's/thetaClimFile=.\(.*\).,/\1/p' data`
+#  set   pSurfInitFile=`sed -n -e   's/pSurfInitFile=.\(.*\).,/\1/p' data`
 #  mv   S.*.*.data  $hydrogSaltFile
 #  mv   T.*.*.data  $hydrogThetaFile
 #  mv   U.*.*.data  $uVelInitFile
 #  mv   V.*.*.data  $vVelInitFile
-#  mv Eta.*.*.data  $thetaClimFile
+#  mv Eta.*.*.data  $pSurfInitFile
 
    # Advance the model saving standard out
-   ./mitgcmuv >! integrate_model_out_temp
+   mpirun.lsf ../mitgcmuv_20p
 
    # Extract the timestep from the ocean model output files.
    set TIMESTEP = `ls -1 S.*.data`
@@ -126,9 +168,6 @@
 
    echo $TIMESTEP | ../trans_pv_sv
 
-   # Append the output from the advance to the file in the working directory
-   #cat integrate_model_out_temp >> ../integrate_model_out_temp$process
-
    # Move the updated state vector back to 'centraldir'
    mv assim_model_state_ud ../$output_file
 
@@ -139,6 +178,7 @@
 end
 
 # Change back to original directory and get rid of temporary directory
+ls -l > ../directory_contents_$ensemble_member
 cd ..
 \rm -rf $temp_dir
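
The commented-out lines in the hunk above sketch a more compact, single-sed way to pull each filename out of the data&PARM05 namelist, instead of the four-stage grep/sed pipeline. A standalone version of that pattern, assuming entries of the form hydrogSaltFile='somefile.bin', in the 'data' namelist file:

   # print only the value between the quote characters (matched by the
   # dots) of the hydrogSaltFile entry, then rename the snapshot to it
   set hydrogSaltFile = `sed -n -e 's/hydrogSaltFile=.\(.*\).,/\1/p' data`
   mv -v S.*.*.data $hydrogSaltFile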
 

Added: DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh	                        (rev 0)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh	2008-05-22 15:51:29 UTC (rev 3376)
@@ -0,0 +1,171 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: http://subversion.ucar.edu/DAReS/DART/trunk/models/cam/shell_scripts/job.simple.csh $
+# $Id: job.simple.csh 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 12:18:09 -0600 (Sun, 11 Mar 2007) $
+
+#-----------------------------------------------------------------------------
+# job.simple.csh ... Top level script to run a single assimilation experiment.
+#
+#  Unlike the more complex job.csh, this script only processes a single
+#  observation file. It is still fairly complex: it requires a raft of
+#  data files, most of them in hardcoded locations.
+#
+# You need to know which of several batch systems you are using. The most
+# common one is LSF; PBS is also common. (POE is another, but it is not
+# supported directly by this script.) Running a parallel cluster without a
+# batch system is not recommended -- the batch system schedules which nodes
+# are assigned to which processes -- but it is possible; you then have to do
+# more work to get the information about which nodes are involved to the
+# parallel tasks. There is a section below that uses ssh and no batch system.
+#
+# How to submit this job:
+#  1. Look at the #BSUB or #PBS sections below and adjust any of the parameters
+#     on your cluster.  Queue names are very system specific; some systems 
+#     require wall-clock limits; some require an explicit charge code.
+#  2. Submit this script to the queue:
+#        LSF:   bsub < job.simple.csh
+#        PBS:   qsub job.simple.csh
+#       NONE:   job.simple.csh
+#
+# The script moves the necessary files to the current directory and then
+# starts 'filter' as a parallel job on all nodes; each of these tasks will
+# call a separate advance_model.csh when necessary.
+#
+# The central directory is where the scripts reside and where script and 
+# program I/O are expected to happen.
+#-----------------------------------------------------------------------------
+
+set CENTRALDIR = `pwd`
+set experiment = DARTMIT
+alias submit 'bsub < \!*'
+
+set myname = $0     # this is the name of this script
+
+# some systems don't like the -v option to any of the following 
+
+set OSTYPE = `uname -s` 
+switch ( ${OSTYPE} )
+   case IRIX64:
+      setenv REMOVE 'rm -rf'
+      setenv   COPY 'cp -p'
+      setenv   MOVE 'mv -f'
+      breaksw
+   case AIX:
+      setenv REMOVE 'rm -rf'
+      setenv   COPY 'cp -p'
+      setenv   MOVE 'mv -f'
+      breaksw
+   default:
+      setenv REMOVE 'rm -rvf'
+      setenv   COPY 'cp -vp'
+      setenv   MOVE 'mv -fv'
+      breaksw
+endsw
+
+echo " "
+echo "Running $experiment on host "`hostname`
+echo "Initialized at "`date`
+echo "CENTRALDIR is "`pwd`
+
+#-----------------------------------------------------------------------------
+# Set variables containing various directory names where we will GET things
+#-----------------------------------------------------------------------------
+
+set DARTDIR = /fs/image/home/${user}/SVN/DART
+set DARTMITDIR = ${DARTDIR}/models/MITgcm_ocean
+set MITDATADIR = ${DARTMITDIR}/inputs
+
+#-----------------------------------------------------------------------------
+# Get the DART/MIT executables and scripts
+#-----------------------------------------------------------------------------
+
+${COPY} ${DARTMITDIR}/work/filter                     .
+${COPY} ${DARTMITDIR}/work/wakeup_filter              .
+${COPY} ${DARTMITDIR}/work/trans_pv_sv                .
+${COPY} ${DARTMITDIR}/work/trans_sv_pv                .
+${COPY} ${DARTMITDIR}/work/mitgcmuv_20p               .
+${COPY} ${DARTMITDIR}/shell_scripts/advance_model.csh .
+${COPY} ${DARTMITDIR}/shell_scripts/runme_filter      .
+
+#-----------------------------------------------------------------------------
+# Get the necessary data files -- this is the hard part.
+# This script does not 'cold start' the ocean model, nor spin up DART.
+#-----------------------------------------------------------------------------
+
+if (-d inputs) then
+   echo "using existing 'inputs' directory"
+else
+   echo "Making 'inputs' directory"
+   mkdir inputs
+endif
+${COPY} ${MITDATADIR}/*                               inputs
+${COPY} ${DARTMITDIR}/work/obs_seq.out                .
+${COPY} ${DARTMITDIR}/work/filter_ics                 .
+${COPY} ${DARTMITDIR}/work/input.nml                  .
+cp -pv inputs/data                                    .
+cp -pv inputs/data.cal                                .
+
+#-----------------------------------------------------------------------------
+# Ensure the (output) experiment directory exists
+# All the  MIT-related files will get put in ${experiment}/MIT
+# All the DART-related files will get put in ${experiment}/DART
+#-----------------------------------------------------------------------------
+
+if (-d ${experiment}) then
+   echo "${experiment} already exists"
+else
+   echo "Making run-time directory ${experiment} ..."
+   mkdir -p ${experiment}
+endif
+mkdir -p ${experiment}/{MIT,DART}
+
+#-----------------------------------------------------------------------------
+# Runs filter which integrates the results of model advances  (async=4).
+#-----------------------------------------------------------------------------
+
+submit runme_filter
+
+echo "Finished at "`date`
+
+exit
+
+#-----------------------------------------------------------------------------
+# Move the output to storage after filter completes.
+# At this point, all the restart and diagnostic files are in the CENTRALDIR
+# and need to be moved to the 'experiment permanent' directory.
+# We have had problems with some, but not all, files being moved
+# correctly - sometimes we get here before the filesystem has finished
+# writing them - so some bulletproofing is needed to confirm the files
+# are complete (a sketch of one such check follows this diff section).
+#-----------------------------------------------------------------------------
+
+echo "Listing contents of CENTRALDIR before archiving"
+ls -l
+
+${MOVE} *.data *.meta         ${experiment}/MIT
+${MOVE} STD*                  ${experiment}/MIT
+
+${MOVE} filter_restart*            ${experiment}/DART
+${MOVE} assim_model_state_ud[1-9]* ${experiment}/DART
+${MOVE} assim_model_state_ic[1-9]* ${experiment}/DART
+${MOVE} Posterior_Diag.nc          ${experiment}/DART
+${MOVE} Prior_Diag.nc              ${experiment}/DART
+${MOVE} obs_seq.final              ${experiment}/DART
+${MOVE} dart_log.out               ${experiment}/DART
+
+# Good style dictates that you save the scripts so you can see what worked.
+
+${COPY} input.nml                  ${experiment}/DART
+${COPY} *.csh                      ${experiment}/DART
+${COPY} $myname                    ${experiment}/DART
+
+ls -lrt


Property changes on: DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh
___________________________________________________________________
Name: svn:executable
   + *
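
The archival comment in job.simple.csh mentions bulletproofing to ensure the filesystem has finished writing before files are moved, but no such check appears in the commit. A minimal sketch of one approach, assuming the file names already used in the script: poll each file until two successive size checks agree before trusting it.

   # wait until a file's size stops changing before declaring it complete
   foreach FILE ( Prior_Diag.nc Posterior_Diag.nc obs_seq.final )
      set oldsize = -1
      set newsize = `ls -l $FILE | awk '{print $5}'`
      while ( $oldsize != $newsize )
         sleep 10
         set oldsize = $newsize
         set newsize = `ls -l $FILE | awk '{print $5}'`
      end
   end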

Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter	2008-05-22 15:46:48 UTC (rev 3375)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter	2008-05-22 15:51:29 UTC (rev 3376)
@@ -28,12 +28,12 @@
 ##=============================================================================
 #BSUB -J filter
 #BSUB -o filter.%J.log
-#BSUB -q regular
-#BSUB -n 16
-##BSUB -P nnnnnnnn
-#BSUB -W 1:00
+#BSUB -q dedicated
+#BSUB -n 20
+#BXXX -P 868500xx
+#BSUB -W 12:00
+#BSUB -N -u ${USER}@ucar.edu
 #
-#
 ##=============================================================================
 ## This block of directives constitutes the preamble for the PBS queuing system
 ## PBS is used on the CGD   Linux cluster 'bangkok'
@@ -56,8 +56,8 @@
 #PBS -r n
 #PBS -e filter.err
 #PBS -o filter.log
-#PBS -q medium
-#PBS -l nodes=16:ppn=2
+#PBS -q dedicated
+#PBS -l nodes=10:ppn=2
 
 # if async=2, e.g. you are going to run './mitgcmuv', single process
 # (or possibly 'mpirun -np 1 ./mitgcmuv'), so each processor advances 
@@ -73,7 +73,7 @@
 
 set parallel_model = "true"
 
-set num_ens = 16
+set num_ens = 20
 
 # A common strategy for the beginning is to check for the existence of
 # some variables that get set by the different queuing mechanisms.
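
For context on the subject line's async=4: in that mode, filter and the model advances run as separate parallel jobs, coordinated through wakeup_filter (which job.simple.csh copies into place). The corresponding input.nml entries would look roughly like the following sketch; the filter_nml variable names follow DART convention, and the exact values are an assumption, not part of this commit:

   &filter_nml
      async            = 4,
      ens_size         = 20,
      adv_ens_command  = "./advance_model.csh"
   /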

