[Dart-dev] [4384] DART/trunk/models/cam/full_experiment: Updates from Kevin Raeder:

nancy at ucar.edu
Thu Jun 3 11:16:03 MDT 2010


Revision: 4384
Author:   nancy
Date:     2010-06-03 11:16:03 -0600 (Thu, 03 Jun 2010)
Log Message:
-----------
Updates from Kevin Raeder:

full_experiment/analyses2initial.csh
  Check the CLM files for the existence/use of the _FillValue attribute.
  If missing, add it to the appropriate variables of the whole ensemble.
  This allows ncea to correctly average fields which contain a spval that is
  not identified as such.  The list of fields having spvals,
  ${CENTRALDIR}/clm_FillValue_fields, comes from clm_FillValue_vars.csh.
  Accommodate both CLM 3.5 and 3.6+ by testing for the existence of the
  additional fields in 3.6+.
  Archive the ensemble average of the iceinput_#.nc files as the "analysis" for the CICE model.
  Archive member 1 of the CICE and CLM restart files for comparison with the ensemble average.
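
  (Illustrative sketch, not part of the commit: how the _FillValue step lets
  ncea skip spvals when averaging.  It assumes clm_FillValue_fields is in the
  working directory, one line per spval: "spval field1|field2|...".)

    #!/bin/csh
    # Hypothetical sketch; the committed analyses2initial.csh does this inside
    # its time loop and only when clminput_1.nc lacks _FillValue attributes.
    set fill_list  = clm_FillValue_fields
    set num_spvals = `wc -l $fill_list`
    foreach f ( clminput_[1-9]*.nc )
       set spv = 1
       while ($spv <= $num_spvals[1])
          set Fills = `head -$spv $fill_list | tail -1`
          # -h: leave the history global attr alone; c,d: create a double attr
          ncatted -O -h -a _FillValue,"$Fills[2]",c,d,$Fills[1] $f
          @ spv++
       end
    end
    # With _FillValue set, ncea ignores the flagged values while averaging.
    ncea -O -o clm_ens_avg.nc clminput_[1-9]*.nc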

full_experiment/clm_FillValue_vars.csh
  A new script which searches a CLM restart(/initial) NetCDF file for variables
  that contain special values and therefore need the _FillValue attribute.
  This list is needed by analyses2initial.csh in order to correctly generate
  the ensemble average of the CLM files.  It can handle any number of spvals
  and fields which use them.  The spvals are specified in the script.
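
  (Illustrative sketch, not part of the commit: the kind of search
  clm_FillValue_vars.csh performs.  The file name and spvals here are examples.)

    #!/bin/csh
    # Hypothetical sketch: report which variables of a CLM restart file
    # contain each special value, so _FillValue can be attached to them later.
    set file   = clminput_1.nc
    # The leading backslash keeps grep from reading -1e+36 as an option.
    set spvals = ( '1e+36' '\-1e+36' )
    # Variable names come from the double/int declarations in the header.
    set vars = `ncdump -h $file | awk '$1 == "double" || $1 == "int" {sub(/\(.*/, "", $2); print $2}'`
    foreach spv ($spvals)
       set hits = ()
       foreach var ($vars)
          # Dump one variable and look for the special value in its data.
          ncks -v $var $file | grep "$spv" > /dev/null
          if ($status == 0) set hits = ($hits $var)
       end
       echo "$spv : $hits"
    end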

full_experiment/job_mpi.csh
  Smoother handling of nlevs for cam3.5 vs. cam3.6.
  Adapted to CICE restart files being available as NetCDF instead of binary.
  Augmented code which defines the obs_seq name to handle multiple years, leap years included.
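
  (Illustrative sketch, not part of the commit: the leap-year test job_mpi.csh
  applies when counting obs_seq files per month.  Variable names and values
  below are examples only; the script tests only year mod 4, which suffices
  for the years it handles, though century years would need the full
  Gregorian rule.)

    #!/bin/csh
    # Hypothetical sketch: obs_seq files contained in one month, leap years included.
    set days_in_mo   = (31 28 31 30 31 30 31 31 30 31 30 31)
    set yr           = 2008
    set mo           = 2
    set obs_seq_freq = 4                         # obs_seq files per day
    @ seq_in_mo = $days_in_mo[$mo] * $obs_seq_freq
    # Same test as job_mpi.csh: add one day's worth of obs_seq files in a leap February.
    if ($mo == 2 && ($yr % 4) == 0) @ seq_in_mo = $seq_in_mo + $obs_seq_freq
    echo "year $yr month $mo holds $seq_in_mo obs_seq files"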

full_experiment/auto_diagPOP2ms_LSF.csh
  New variation of auto_diag2ms_LSF.csh to handle archiving of the coupler history files
  needed to force POP assimilations.
  tcsh is needed instead of csh so that the :r, :e, etc. modifiers can be used on the coupler (cplr) filenames.
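
  (Illustrative sketch, not part of the commit: what the tcsh :t/:r/:e
  modifiers extract from a coupler history filename; the filename below is an
  example of the form the script handles.)

    #!/bin/tcsh
    # Hypothetical sketch: pull the case name and date out of a cplr history filename.
    set hist = H06/FV_2deg_greg-O2-POP1-1.cpl.ha2x6h.2006-12-03.nc
    set date = $hist:r:e          # drop .nc, keep the last extension  -> 2006-12-03
    set case = $hist:t:r:r:r:r    # basename, then strip 4 extensions  -> FV_2deg_greg-O2-POP1-1
    echo "case = $case   date = $date"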

Modified Paths:
--------------
    DART/trunk/models/cam/full_experiment/analyses2initial.csh
    DART/trunk/models/cam/full_experiment/job_mpi.csh

Added Paths:
-----------
    DART/trunk/models/cam/full_experiment/auto_diagPOP2ms_LSF.csh
    DART/trunk/models/cam/full_experiment/clm_FillValue_vars.csh

-------------- next part --------------
Modified: DART/trunk/models/cam/full_experiment/analyses2initial.csh
===================================================================
--- DART/trunk/models/cam/full_experiment/analyses2initial.csh	2010-06-03 17:07:54 UTC (rev 4383)
+++ DART/trunk/models/cam/full_experiment/analyses2initial.csh	2010-06-03 17:16:03 UTC (rev 4384)
@@ -10,15 +10,13 @@
 # on a file (Posterior usually) and put each timeslot into CAM initial file format.
 # The non-state fields are ensemble averages of all the caminput_#.nc from the same 
 # timeslot.
-#
-# The CLM ensemble clminput_#.nc (all non-state fields) is ensemble averaged, 
-# and then the snow fields are overwritten using the algorithm from 
-# cam#.#.#/models/lnd/clm2/src/main/snowdp2lev.F90
-# packaged as an NCO script, which is used by ncap2.
-# ncap2 may need to be updated to NCO 3.9.4 or later.
 
+# The CLM ensemble clminput_#.nc (all are non-state fields) is ensemble averaged by NCO, 
+# and then the snow and water fields are overwritten using the fortran program clm_ens_avg.
+
 # set echo verbose
 
+
 # Called from auto_diag2ms_LSF.csh with
 #   ../../analyses2initial.csh no_MS '.' Posterior copy 1 ${obs_seq}H              >>& $saved
 
@@ -115,15 +113,57 @@
       rm time_copy_slab.nc avgd_copy_out.nc re-order* 
 
       #-----------
-      # CLM; ensemble average of the clminput files.   ncra can't be used because files
-      #      have no record dimension.  This will have incorrect snow and water fields.
+      # CLM; 
+      # Check that clminput_##.nc files have the _FillValue set, so that averaging will ignore
+      # those members with spvals.
+      set num_Fills = 0
+      set num_Fills = `ncdump -h clminput_1.nc | grep FillValue | wc -l`
+      # Add the _FillValue attribute to the fields which might need it
+      if ($num_Fills[1] == 0) then
+         if (-e ../../../clm_FillValue_fields) then
+            set num_ens = `ls -l clminput_*.nc | wc -l`
+            set ens = 1
+            while ($ens <= $num_ens[1])
+               # clm_FillValue_fields needs to have at least 1 line consisting of
+               # spval [space] associated_fields (separated by |) (with optional wildcard characters)
+               # Then the quoting must proceed as shown.
+               # ncatted -O -h -a           _FillValue,"$include_Fills",c,d,1.0e36  clminput_${ens}.nc
+               #   ncatted -O -h -a _FillValue,'T_REF2M_MAX_INST(_R|_U)?',m,d,-1.0e36 clminput_${ens}.nc
+               # If a field in Fills is not on the files, NO ERROR will result.
+               # -h keeps this ncatted command from being appended to the history global attr.
+               # ,c means create this attr for vars which don't have it.
+               # ,d means this attr will be type 'double', which is the type of most variables.
+               #    Will it be converted to integer for those variables? Only SNLSNO is in this category
+               #    and it will be handled manually in clm_ens_avg.f90
+               set num_spvals = `wc -l ../../../clm_FillValue_fields`
+               set spv = 1
+               while ($spv <= $num_spvals[1])
+                  set Fills = `head -$spv ../../../clm_FillValue_fields | tail -1`
+                  ncatted -O -h -a _FillValue,"$Fills[2]",c,d,$Fills[1] clminput_${ens}.nc
+                  @ spv++
+               end
+               @ ens++
+            end
+         else
+            echo "Need a clm_FillValue_fields in the CENTRAL directory"
+         endif
+      endif
+
+      # Ensemble average of the clminput files.   
+      # ncra can't be used because files have no record dimension.  
+      # This will have incorrect snow and water fields.
+      # Anything excluded from averaging (with -x -v vars) will not appear on output file,
+      # even if output file pre-exists with those vars on it.
       ncea -O -o clm_ens_avg.nc clminput_[1-9]*.nc 
 
       # Create a file of the ensemble of the snow and water fields which need to be fixed.
       set cat_flds = 'DZSNO,H2OSNO,H2OSOI_LIQ,H2OSOI_ICE,SNLSNO,SNOWDP,T_SOISNO'
-      set cat_flds = "${cat_flds},snw_rds,qflx_snofrz_lyr,mss_bcpho,mss_bcphi,mss_ocpho,mss_ocphi"
-      set cat_flds = "${cat_flds},mss_dst1,mss_dst2,mss_dst3,mss_dst4"
-      set cat_flds = "${cat_flds},flx_absdv,flx_absdn,flx_absiv,flx_absin"
+      ncdump -h clminput_1.nc | grep snw_rds        > /dev/null
+      if ($status == 0) then
+         set cat_flds = "${cat_flds},snw_rds,qflx_snofrz_lyr,mss_bcpho,mss_bcphi,mss_ocpho,mss_ocphi"
+         set cat_flds = "${cat_flds},mss_dst1,mss_dst2,mss_dst3,mss_dst4"
+         set cat_flds = "${cat_flds},flx_absdv,flx_absdn,flx_absiv,flx_absin"
+      endif
       ncecat -u ensemble -v ${cat_flds} -o snow_water_ens.nc clminput_[1-9]*.nc
 
       # Fix the snow fields with fortran program clm_ens_avg.f90 (different versions for 
@@ -137,13 +177,23 @@
       cp clminput_1.nc clm_${out_name}
       ncks -A -x -v '^timemgr' -o clm_${out_name} clm_ens_avg.nc
 
-# Remove files which won't be removed after archiving in auto_diag2ms_LSF.csh
+      # Added for comparison of forecasts with member 1 vs the ens avg.
+      cp clminput_1.nc clm_init_memb1_${yrmoday}$hours[$time].nc 
+
+      # Remove files which won't be removed after archiving in auto_diag2ms_LSF.csh
       rm snow_water_ens.nc input.nml dart*
 
       #-----------
-      # ICE; no averaging available yet; use the first ensemble member as the "analysis"
-      cp iceinput_1.tar     ice_init_memb1_${yrmoday}$hours[$time].tar
+      # ICE; 
+      # The ensemble average is simple, even though (because?) the ICE files have no variable attributes,
+      # there are no coordinate variables corresponding to the dimensions, and 3.6.71 has *global* 
+      # missing_ and Fill_ Value attributes set to 0 (instead of the spval used in CICE; 1e+30).
+      # ncea pruning unused dimensions doesn't seem to be a problem (yet).
+      ncea -O -o ice_${out_name} iceinput_[1-9]*.nc
 
+      # Also save the first ensemble member as the "analysis"
+      if (-e iceinput_1.nc) cp iceinput_1.nc     ice_init_memb1_${yrmoday}$hours[$time].nc
+
       cd ..
       @ time++
    end

Added: DART/trunk/models/cam/full_experiment/auto_diagPOP2ms_LSF.csh
===================================================================
--- DART/trunk/models/cam/full_experiment/auto_diagPOP2ms_LSF.csh	                        (rev 0)
+++ DART/trunk/models/cam/full_experiment/auto_diagPOP2ms_LSF.csh	2010-06-03 17:16:03 UTC (rev 4384)
@@ -0,0 +1,348 @@
+#!/bin/tcsh
+# csh doesn't let :r, :e, etc work in the coupler history files archiving section.
+#!/bin/csh
+#
+# DART software - Copyright © 2004 - 2010 UCAR. This open source software is
+# provided by UCAR, "as is", without charge, subject to all terms of use at
+# http://www.image.ucar.edu/DAReS/DART/DART_download
+#
+# $Id$
+#
+# script for copying diagnostics files to mass store.
+
+#### LSF options for BSUB
+### -J      job name    (master script job.csh presumes filter.xxxx.log)
+### -o      output listing filename
+### -P      account number
+### -q # Queue name    regular   economy  standby     long   
+#        proclim            32        16        8        2      
+#        timelim         6 hrs        18       48   5 days  
+#        # jobs/person       2         -        -        2
+### -n      number of tasks (processors)
+### -x      exclusive use of node
+### -R "span[ptile=(num procs you want on each node)]"
+#
+#BSUB -J auto_diag2ms
+#BSUB -o auto_diag2ms.%J.log
+#BSUB -e auto_diag2ms.%J.err
+#BSUB -P 93300315
+#BSUB -q share
+#BSUB -W 2:00
+#BSUB -n 1
+#BSUB -R "span[ptile=1]"
+#xxxx -x
+# Caused job to not run   #BSUB -xn
+
+
+
+
+# test for new NCO
+# set  echo verbose
+# 
+# set path = ( /usr/local/bin \
+# /usr/local/lsf/7.0/aix5-64/etc \
+# /usr/local/lsf/7.0/aix5-64/bin \
+# /usr/bin \
+# /etc \
+# /usr/sbin \
+# /usr/ucb \
+# /usr/bin/X11 \
+# /sbin \
+# /usr/java14/jre/bin \
+# /usr/java14/bin \
+# /bin \
+# /blhome/raeder/bin \
+# . \
+# /contrib/tunnel \
+# /usr/local/apps/nco-3.9.6/bin \
+# /blhome/raeder/scripts \ )
+# 
+# which ncap2 
+# 
+# unset echo
+# end NCO test
+
+
+
+
+
+setenv LD_LIBRARY_PATH ${LD_LIBRARY_PATH}:/usr/local/dcs/lib
+
+set compress = true
+set proj_num = 93300315
+
+set diag_name = diagnostics.tar
+set saved = saved_diagnostics
+set write_pass = $$
+
+if ($?LS_SUBCWD) then
+   cd $LS_SUBCWD
+endif
+
+touch $saved
+echo '------------------------------------------------------------' >> $saved
+echo 'auto_diag2ms_LSF starts in' >> $saved
+pwd                               >> $saved
+date                              >> $saved
+
+set direct = `pwd`
+set obs_seq = $direct:t
+
+cd ..
+set direct = `pwd`
+set case = $direct:t
+
+cd ..
+set direct = `pwd`
+set exp_dir = $direct:t
+
+cd $case/${obs_seq}
+set ms_dir = /RAEDER/DAI/${exp_dir}/$case/${obs_seq}
+
+# IBM tar requires 1 entry/line in list of things to exclude from tar
+echo DART                >! tar_excl_list
+echo CAM                 >> tar_excl_list
+echo CLM                 >> tar_excl_list
+echo ICE                 >> tar_excl_list
+# batch* are the files into which DART,CAM,CLM are archived by auto_re2ms_LSF.csh,
+# which is run at the same time as this script.
+echo 'batch*'            >> tar_excl_list
+echo $saved              >> tar_excl_list
+
+## Added to make mean easily accessible in the form of a CAM initial file
+echo 'cam_analyses.tar'  >> tar_excl_list
+echo 'H[0-9]*'           >> tar_excl_list
+echo 'H_*'               >> tar_excl_list
+
+#-----------------------------
+# Stuff the Posterior mean fields into CAM initial files.
+# Tar together with a CLM initial file.
+# Arguments to analyses2initial.csh are
+#   set ms_file   = $1   script searches for local Posterior_Diag.nc first, so give a dummy MS name.
+#   set local_dir = $2
+#   set kind      = $3
+#   set dim       = $4
+#   set element1  = $5
+#   set yrmoday   = $6   set to $obs_seq instead of yyyymmdd, since obs_seq is easily available
+
+# Save out history files from H* directories before calculating CAM/CLM analyses and deleting H*
+# compressing saves a factor of 12.
+# Can't do with first CAM 3.6.71; empty_htapes conflicted with ENDOFRUN
+ ls H[012]*/*.h0.* >& /dev/null
+if ($status == 0) then
+   gzip H[012]*/*.h0.*
+   tar -c -f H_all.h0.gz.tar H[012]*/*.h0.*
+   set ar_status = $status
+   if ($ar_status == 0 && -e H_all.h0.gz.tar) then
+      msrcp -pe 1000 -pr $proj_num -wpwd $write_pass -comment "write password $write_pass" \
+            H_all.h0.gz.tar mss:${ms_dir}/H_all.h0.gz.tar                    >>& $saved  &
+      set ar_status = $status
+      if ($ar_status == 0) rm H[012]*/*.h0.* 
+   endif
+   if ($ar_status != 0) echo 'ARCHIVING of H[012]*.h0.gz.tar FAILED' >> & $saved
+else
+    echo 'ARCHIVING of H[012]*.h0.gz.tar FAILED; no files available' >> & $saved
+endif 
+
+# Save out coupler history files from H* directories before calculating CAM/CLM analyses and deleting H*
+# Compressing saves a factor of ??.
+# File names have the form H##/FV_2deg_greg-O2-POP1-1.cpl.ha2x6h[r].2006-12-03.nc   ## = 06,12,18,24
+# These fields were identified from the sample input file provided by Yeager and/or Lindsey. 
+
+
+# ? Keep time_bnds too
+
+set flds = 'time,doma_lat,doma_lon,doma_area,doma_mask'
+set flds = "${flds},a2x6h_Faxa_swndr,a2x6h_Faxa_swvdr,a2x6h_Faxa_swndf,a2x6h_Faxa_swvdf"
+set flds = "${flds},a2x6h_Faxa_rainc,a2x6h_Faxa_rainl,a2x6h_Faxa_snowc,a2x6h_Faxa_snowl"
+set flds = "${flds},a2x6h_Sa_z,a2x6h_Sa_u,a2x6h_Sa_v,a2x6h_Sa_tbot,a2x6h_Sa_ptem,a2x6h_Sa_shum"
+set flds = "${flds},a2x6h_Sa_pbot,a2x6h_Sa_dens,a2x6h_Sa_pslv,a2x6h_Faxa_lwdn"
+
+set memb = 1
+set more = true
+while ($more == true )
+   ls H[012]*/*-${memb}.cpl.ha2x6h*.* >& /dev/null
+   if ($status != 0) then
+      set more = false
+      if ($memb == 1) then
+         echo 'ARCHIVING of H[012]/*cpl.ha2x6h.* FAILED; no files available' >> & $saved
+      endif
+   else
+      # Extract filenames' pieces
+      set hists = `ls H[012]*/*-${memb}\.cpl\.ha2x6h*`
+      set date = $hists[1]:r:e
+      set case = $hists[1]:t:r:r:r:r
+
+      # Ensemble average of the 4 6-hour averages for each ensemble member.
+
+
+      ncra -O -v ${flds} -o ${case}.cpl.ha2x1davg.${date}.nc $hists
+
+      echo 'ncra created average file:'                                     >> & $saved
+      ls ${case}.cpl.ha2x1davg.${date}.nc                                   >> & $saved
+      echo 'from'                                                           >> & $saved
+      ls -l $hists                                                          >> & $saved
+
+      # ncea prunes unused dimensions (here; time) from the output file.  
+      # That's OK because lots of these averaged
+      # files will be concatenated into the single file which datm/cplr wants to see.
+      # The time dimension can be reinstated then.
+      # So don't remove the averaged files after they've been archived; 
+      # concatenate them together into months while bundling the obs_seq.final files.
+      ncap2 -O -s 'time[$time]={.5}' ${case}.cpl.ha2x1davg.${date}.nc ${case}.cpl.ha2x1davg.${date}.nc 
+
+      # Concatenate the times for each day into a single file using record (time) concatenator.
+      ncrcat -v ${flds} -o ${case}.cpl.ha2x1dx6h.${date}.nc $hists
+      echo 'ncrcat created daily time series file:'                                     >> & $saved
+      ls ${case}.cpl.ha2x1dx6h.${date}.nc                                   >> & $saved
+      echo 'from'                                                           >> & $saved
+      ls -l $hists                                                          >> & $saved
+
+      # The times on the concatenated cplr history file are time = 0.25, 0.25, 0.25, 0.25 ;
+      #   as measured from    time:units = "days since 2006-12-02 00:00:00" ;
+      # ? Fix that when generating, or 
+      #   Fix for each day here, then add day information during final concatenation into monthly files.
+      ncap2 -O -s 'time[$time]={0.25,.5,.75,1.0}' ${case}.cpl.ha2x1dx6h.${date}.nc \
+            ${case}.cpl.ha2x1dx6h.${date}.nc
+
+      # Compress the daily avg and time series files for this ens member while working on the next
+      gzip ${case}.cpl.ha2x1d* &
+
+   endif
+   @ memb++
+end
+
+wait
+tar -c -f H_cplr.ha2x1dx6h.gz.tar *.ha2x1dx6h*gz &
+tar -c -f H_cplr.ha2x1davg.gz.tar *.ha2x1davg*gz &
+wait
+tar -c -f H_cplr.ha2x1d.gz.tar *.ha2x1d[ax]*tar
+set ar_status = $status
+if ($ar_status == 0 && -e H_cplr.ha2x1d.gz.tar) then
+   msrcp -pe 1000 -pr $proj_num -wpwd $write_pass -comment "write password $write_pass" \
+         H_cplr.ha2x1d.gz.tar mss:${ms_dir}/H_cplr.ha2x1d.gz.tar                    >>& $saved  
+   set ar_status = $status
+   # Leave ha2x2davg.gz.tar here for monthly archiving, like obs_seq.final
+   if ($ar_status == 0) then
+      rm H[012]*/*.ha2x6h*  *.ha2x1dx6h*.gz *.ha2x1davg*.gz &
+      echo "SUCCEEDED archiving H_cplr.ha2x1d.gz.tar" >>& $saved
+   endif
+endif
+if ($ar_status != 0) echo 'ARCHIVING of *.ha2x1d.gz.tar FAILED' >> & $saved
+
+#-------------------------------------
+# Archive the analyses.
+if (-e ../../analyses2initial.csh) then
+   # analyses2initial.csh needs CAM initial files to average and receive the analyses 
+   # from Posterior_Diag.nc.
+   # They should have been saved during the assimilation and be living in the 
+   # exp/obs_####/H## directories.
+   ls H*/caminput_1.nc > /dev/null
+   set stat = $status
+   if ($stat == 0) then
+      ls H*/clminput_1.nc > /dev/null
+      set stat = $status
+   endif
+   if ($stat != 0) then
+      echo "H*/c[al]minput_* not available"                           >>& $saved
+      echo "H*/c[al]minput_* not available" >&  ANALYSES_NOT_SAVED
+   else
+#   if ($status == 0) ls H*/clminput_1.nc > /dev/null
+#   if ($status != 0) then
+#      echo "H*/c[al]minput_* not available; exiting"                           >>& $saved
+#      echo "H*/c[al]minput_* not available; exiting" >&  ANALYSES_NOT_SAVED
+#      exit
+#   endif
+   
+      set num_anal = `ls H[0-2]*/cam_init_*`
+      set tar_stat = 0
+      if (! -e cam_analyses.tar) then
+         if ($#num_anal < 4) then
+            echo " "                                                                >>& $saved
+            ../../analyses2initial.csh no_MS '.' Posterior copy 1 ${obs_seq}        >>& $saved
+            set num_anal = `ls H[0-2]*/cam_init_*`
+         endif
+   
+         tar -c -f cam_analyses.tar H[0-2]*/{c,ice}*_init_* 
+         set tar_stat = $status
+      endif
+      # This section requires that old/failed save_diagnostic files be left in place.
+      # cam_init_analysis is part of a filename constructed in analyses2initial.csh
+      # and printed for each H# directory.
+# Sometimes H06 doesn't get written to saved_diagnostics by analyses2initial.csh
+#      grep cam_init_analysis saved_diagnostics >&! cam_init
+#      set num_anal = `wc -l cam_init`
+# This assumes 4 analysis times/obs_seq file
+#      if ($tar_stat == 0 && $num_anal[1] > 3)  \
+      if ($tar_stat == 0 )  \
+         msrcp -pe 1000 -pr $proj_num -wpwd $write_pass -comment "write password $write_pass" \
+               cam_analyses.tar mss:${ms_dir}/cam_analyses.tar                    >>& $saved  
+      set list = `ls -l cam_analyses.tar`
+      set local_size = $list[5]
+      set list = `msls -l ${ms_dir}/cam_analyses.tar`
+      set ms_size = $list[5]
+      echo " cam_analyses.tar local_size = $local_size, ms_size = $ms_size"       >> $saved
+   
+      if ($local_size == $ms_size) then
+         echo "Archived $ms_dir/cam_analyses.tar with write password $write_pass" >> $saved
+         echo '    REMOVING H[0-9]* and cam_analyses.tar '                        >> $saved
+         rm -rf H[0-9]*/[ci]* cam_analyses.tar cam_init
+      else
+         echo "msrcp of ${ms_dir}/cam_analyses.tar  failed; "                     >> $saved
+         echo 'NOT removing H[0-9]* and cam_analyses.tar '                        >> $saved
+      endif
+   endif
+else
+   echo "NO analyses2initial.csh, so no CAM initial file format analyses created"
+endif
+
+#-----------------------------
+
+if (! -e $diag_name && ! -e ${diag_name}.gz) tar -c -v -f $diag_name -X tar_excl_list * >>& $saved
+rm tar_excl_list
+
+if ($compress == true) then
+   if (-e $diag_name) then
+      gzip $diag_name                         >>& $saved
+      set diag_name = ${diag_name}.gz
+   else if (-e ${diag_name}.gz) then
+      set diag_name = ${diag_name}.gz
+   else
+      echo "$diag_name does not exist at gzip" >> $saved
+      exit
+   endif
+endif
+
+echo "files will be written to ${ms_dir}/${diag_name}" >> $saved
+echo "with write password $write_pass" >> $saved
+
+msrcp -pe 1000 -pr $proj_num -wpwd $write_pass -comment "write password $write_pass" \
+      ${diag_name} mss:${ms_dir}/${diag_name} >>& $saved
+
+set list = `ls -l $diag_name`
+set local_size = $list[5]
+set list = `msls -l ${ms_dir}/${diag_name}`
+set ms_size = $list[5]
+echo " ${diag_name} local_size = $local_size, ms_size = $ms_size" >> $saved
+
+if ($local_size == $ms_size) then
+   echo "Archived files with write password $write_pass" >> $saved
+   echo "msrcp of $ms_dir/$diag_name succeeded; REMOVING $diag_name and P*Diag.nc " >> $saved
+   rm $diag_name P*Diag.nc
+else
+   echo "msrcp of ${ms_dir}/$obs_seq  failed; " >> $saved
+   echo "NOT removing $diag_name and P*.nc"      >> $saved
+endif
+
+chmod 444 $saved
+
+wait
+if ($ar_status == 0) rm H_all.h0.gz.tar  *.ha2x1dx6h*.gz.tar
+
+exit 0
+
+# <next few lines under version control, do not edit>
+# $URL$
+# $Revision$
+# $Date$
+


Property changes on: DART/trunk/models/cam/full_experiment/auto_diagPOP2ms_LSF.csh
___________________________________________________________________
Added: svn:executable
   + *
Added: svn:mime-type
   + text/plain
Added: svn:keywords
   + Date Rev Author HeadURL Id
Added: svn:eol-style
   + native

Added: DART/trunk/models/cam/full_experiment/clm_FillValue_vars.csh
===================================================================
--- DART/trunk/models/cam/full_experiment/clm_FillValue_vars.csh	                        (rev 0)
+++ DART/trunk/models/cam/full_experiment/clm_FillValue_vars.csh	2010-06-03 17:16:03 UTC (rev 4384)
@@ -0,0 +1,75 @@
+#!/bin/csh
+
+# The special values(!) which may be found on the CLM initial/restart files.
+# CLM 3.6.71 set spvals = ('1e+36' '-1e+36')
+# ICE 4.0
+#  -1.836 is the value of Tsfcn (and eicen? no; embedded in larger numbers) 
+#         over land for member 1, but NOT 2 or 3 (or ....?)
+set spvals = ('\-1\.836' '1e+30' )
+
+if ($#argv == 0) then
+   echo "Usage: edit find_FillValue_vars.csh to provide spvalS"
+   echo "       find_FillValue_vars.csh file.nc"
+   exit
+else
+   set file = $1
+   if (-e $file:r.FillValue_list) then
+      echo $file:r.FillValue_list exists: move and retry
+      exit
+   endif
+endif
+
+# Generate a list of variables from the input file.
+set head = $file:r.head
+ncdump -h $file | grep double  >! $head
+ncdump -h $file | grep ' int ' >> $head
+
+touch $file:r.FillValue_list
+
+set num_vars = `wc -l $head`
+echo num_vars = $num_vars
+foreach spv ($spvals)
+   # Make a list of vars which have it.
+   set spvars = ()
+   set n = 1
+   # Check each variable for this spval.  
+   while ($n <= $num_vars[1])
+      head -$n $head | tail -1 >! varstring
+      # Replace the ( with a ' ' so that the variable name becomes a separate word.
+      set string = `sed -e "s#(# #g" varstring`
+      set var = $string[2]
+      ncks -v $var $file | grep "$spv" >! spvals
+      set num_spvals = `wc -l spvals`
+      if ($num_spvals[1] > 0) then
+         set spvars = ($spvars $var)
+      endif
+   
+      @ n++
+   end
+   echo spval = $spv
+   echo spvars = $spvars
+   
+   # convert the word list into the string which will be written out 
+   # and read in by analyses2initial.csh and used to add _FillValue 
+   # to the necessary variables in CLM initial files.
+   echo "#spvars = $#spvars"
+   set n = 0
+   set l = 1
+   @ num_lines = ($#spvars / 10) + 1
+   while ($l <= $num_lines)
+      @ nend = $l * 10
+      if ($l == $num_lines) @ nend = $#spvars 
+      @ n++
+      set var_list = $spvars[$n]
+      while ($n < $nend)
+         @ n++
+         set var_list = "$var_list|$spvars[$n]"
+      end
+      echo $spv $var_list >> $file:r.FillValue_list
+      @ l++
+   end
+end
+   
+rm varstring spvals
+
+exit


Property changes on: DART/trunk/models/cam/full_experiment/clm_FillValue_vars.csh
___________________________________________________________________
Added: svn:executable
   + *
Added: svn:mime-type
   + text/plain
Added: svn:keywords
   + Date Rev Author HeadURL Id
Added: svn:eol-style
   + native

Modified: DART/trunk/models/cam/full_experiment/job_mpi.csh
===================================================================
--- DART/trunk/models/cam/full_experiment/job_mpi.csh	2010-06-03 17:07:54 UTC (rev 4383)
+++ DART/trunk/models/cam/full_experiment/job_mpi.csh	2010-06-03 17:16:03 UTC (rev 4384)
@@ -346,8 +346,6 @@
    set CAM_phis = $CAM_src/cam_phis.nc
    set num_lons  = 64
    set num_lats  = 32
-   # < CAM 3.6.0: 
-   set num_levs  = 26
 else if ($resol == T42) then
    # T42
    set DART_ics_1  = /ptmp/dart/CAM_init/T42/03-01-01/DART_MPI
@@ -360,8 +358,6 @@
    set CAM_phis = $CAM_src/cam_phis.nc
    set num_lons  = 128
    set num_lats  = 64
-   # < CAM 3.6.0: 
-   set num_levs  = 26
 else if ($resol == T85) then
    # T85
    set DART_ics_1  = /ptmp/dart/CAM_init/T85_cam3.5/Jul_1/DART
@@ -373,8 +369,6 @@
    # set CAM_src      = /ptmp/dart/CAM/CAM_src/Cam3/cam3.1/models/atm/cam/bld/T85_3.1-O3
    set num_lons  = 256
    set num_lats  = 128
-   # < CAM 3.6.0: 
-   set num_levs  = 26
 else if ($resol == FV4x5) then
    set DART_ics_1  = /ptmp/dart/CAM_init/FV4x5/03-01-01/DART_MPI
    set CAM_ics_1   = /ptmp/dart/CAM_init/FV4x5/03-01-01/CAM/caminput_
@@ -385,8 +379,6 @@
    set CAM_phis = $CAM_src/cam_phis.nc
    set num_lons  = 72
    set num_lats  = 46
-   # < CAM 3.6.0: 
-   set num_levs  = 26
 else if ($resol == FV1.9x2.5) then
    set DART_ics_1  = /ptmp/dart/CAM_init/FV1.9x2.5_cam3.6.26/Aug_1/DART
    set CAM_ics_1   = /ptmp/dart/CAM_init/FV1.9x2.5_cam3.6.26/Aug_1/CAM/caminput_
@@ -399,11 +391,12 @@
    # Running CAM parallel (async=4) may require grid info for domain decomposition.
    set num_lons  = 144
    set num_lats  = 96
-   # < CAM 3.6.0: set num_levs  = 26
-   set num_levs = 30
    # To use real SSTs it's necessary to pass matching stream info to the ice model by a special namelist
    # which is done through casemodel.
    # This must be done (now) even for CAM < 3.6; enter 'none' and 0s in that case.
+   # Newer CAMs need specific namelist choices.
+   # Define a wordlist to test for appropriate namelist entries.
+   set cam_version = ( 3 6 71 )
    set sst = '/ptmp/dart/CAM_init/FV1.9x2.5_cam3.5/Namelistin_files/sst_HadOIBl_bc_1.9x2.5_1949_2007.nc' 
    set str_yr_first = 1949
    set str_yr_last  = 2007
@@ -416,6 +409,9 @@
 # This is only used if obs_seq_1_depend = false 
 set namelist = 'cwd'
 
+set num_levs  = 26
+if ($cam_version[1] > 3 || ($cam_version[1] == 3 && $cam_version[2] > 5))  set num_levs = 30
+
 if (${parallel_cam} == true) then
    set CAM_src = ${CAM_src}-mpi                                                 
 endif                                                                         
@@ -588,7 +584,7 @@
    if (-e caminput_1.nc) then
       ${REMOVE} clminput_[1-9]*.nc 
       ${REMOVE} caminput_[1-9]*.nc 
-      ${REMOVE} iceinput_[1-9]*.tar 
+      ${REMOVE} iceinput_[1-9]*.nc 
    endif
 
    # Remove any possibly stale CAM surface files
@@ -704,29 +700,29 @@
       echo "##==================================================================" >> ${job_i}
    endif
 
-   if ($parallel_cam == 'false' && $?LS_SUBCWD) then
-      # This environment variable tells how many processors on each node to use
-      # which will depend on the per-processor memory, the model memory high-water mark
-      # the ensemble size and other things.
-      # The following numbers are for bluefire (IBM Power6 chip) with ~2 Gb memory /processor
-      # and 32 processors/node.
-      if ($num_procs == 96) then
-         # want 80 members = 1*28 + 2*26
-         echo "setenv LSB_PJL_TASK_GEOMETRY \"                                                         >> ${job_i}
-         echo ' "{(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27)\'        >> ${job_i}
-         echo " (28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53)\"      >> ${job_i}
-         echo ' (54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79)}" '    >> ${job_i}
-      else if ($num_procs == 32) then
-         # I want 20 = 1*20
-         echo "setenv LSB_PJL_TASK_GEOMETRY \"                  >> ${job_i}
-         echo ' "{(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19)}"'            >> ${job_i}
-      else
-         echo "parallel_cam is false, but num_procs is not 96 or 48 or 32" >> $MASTERLOG
-         exit
-      endif
+#   if ($parallel_cam == 'false' && $?LS_SUBCWD) then
+#      # This environment variable tells how many processors on each node to use
+#      # which will depend on the per-processor memory, the model memory high-water mark
+#      # the ensemble size and other things.
+#      # The following numbers are for bluefire (IBM Power6 chip) with ~2 Gb memory /processor
+#      # and 32 processors/node.
+#      if ($num_procs == 96) then
+#         # want 80 members = 1*28 + 2*26
+#         echo "setenv LSB_PJL_TASK_GEOMETRY \"                                                         >> ${job_i}
+#         echo ' "{(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26)\'           >> ${job_i}
+#         echo " (27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53)\"   >> ${job_i}
+#         echo ' (54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79)}" '    >> ${job_i}
+#      else if ($num_procs == 32) then
+#         # I want 20 = 1*20
+#         echo "setenv LSB_PJL_TASK_GEOMETRY \"                  >> ${job_i}
+#         echo ' "{(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19)}"'            >> ${job_i}
+#      else
+#         echo "parallel_cam is false, but num_procs is not 96 or 48 or 32" >> $MASTERLOG
+#         exit
+#      endif
+#
+#   endif
 
-   endif
-
    echo "set myname = "'$0'"     # this is the name of this script"            >> ${job_i}
    echo "set CENTRALDIR =  ${CENTRALDIR} "                                     >> ${job_i}
    echo "cd ${CENTRALDIR}"                                                     >> ${job_i}
@@ -803,14 +799,28 @@
    else if ($obs_seq_freq > 0) then
 #     Subtract off the days in months before the current month
 #     one month at a time
-      @ month = $mo - 1
-      if ($month == 0) @ month = 12
-      @ seq_in_month = $days_in_mo[$month] * $obs_seq_freq
-      while ($seq > $seq_in_month)
-          @ seq = $seq - $seq_in_month
+#      @ month = $mo - 1
+#      if ($month == 0) @ month = 12
+#      @ seq_in_month = $days_in_mo[$month] * $obs_seq_freq
+#      while ($seq > $seq_in_month)
+#          @ seq = $seq - $seq_in_month
+#          @ month = $month - 1
+#          if ($month == 0) @ month = 12
+#          @ seq_in_month = $days_in_mo[$month] * $obs_seq_freq
+#      end
+      @ month = $mo
+      @ yr = $year
+      @ seq_in_mo = $days_in_mo[$month] * $obs_seq_freq
+      if ($month == 2 && ($yr % 4) == 0) @ seq_in_mo = $seq_in_mo + $obs_seq_freq
+      while ($seq > $seq_in_mo)
           @ month = $month - 1
-          if ($month == 0) @ month = 12
+          if ($month == 0) then
+             @ month = 12
+             @ yr--
+          endif
           @ seq_in_month = $days_in_mo[$month] * $obs_seq_freq
+          if ($month == 2 && ($yr % 4) == 0) @ seq_in_month = $seq_in_month + $obs_seq_freq
+          @ seq = $seq - $seq_in_month
       end
       @ month = $mo
       if ($month < 10) set month = 0$month
@@ -1024,6 +1034,8 @@
       echo 'if (! -d ${NHOME}) mkdir ${NHOME} '                            >> ${job_i}
       echo 'setenv HOME ${NHOME} '                                         >> ${job_i}
       echo "${run_command} ./filter &"                                     >> ${job_i}
+# kdr add in bkill code from async = 2, below, to prevent job_i from finishing 'OK'
+#     after filter dies from , e.g., wrong date.
       echo 'setenv HOME ${OHOME} '                                         >> ${job_i}
       echo " "                                                             >> ${job_i}
       echo 'while ( -e filter_to_model.lock )          '                   >> ${job_i}
@@ -1076,6 +1088,10 @@
       # Run the filter in async=2 mode.
       # runs filter, which tells the model to model advance and assimilates obs
       echo "${run_command} ./filter "                                    >> ${job_i}
+      echo 'if ($status != 0) then '                                     >> ${job_i}
+      echo '   touch FILTER_DIED '                                       >> ${job_i}
+      echo '   bkill $LSB_JOBID '                                        >> ${job_i}
+      echo 'endif  '                                                     >> ${job_i}
    endif
 
    #-----------------
@@ -1196,7 +1212,7 @@
    echo 'while ($n <= '"${num_ens})    ;# loop over all ensemble members "            >> ${job_i}
    echo '   set CAMINPUT = caminput_${n}.nc  '                                        >> ${job_i}
    echo '   set CLMINPUT = clminput_${n}.nc  '                                        >> ${job_i}
-   echo '   set ICEINPUT = iceinput_${n}.tar     '                                        >> ${job_i}
+   echo '   set ICEINPUT = iceinput_${n}.nc     '                                     >> ${job_i}
 
    echo " "                                                                           >> ${job_i}
    echo '   if ( -e $CAMINPUT && ! -z $CAMINPUT) then '                               >> ${job_i}
@@ -1253,7 +1269,7 @@
    echo '   while ($ens <= '$num_ens" )  "                                            >> ${job_i}
    echo "      cp ${CAM_ics_1}"'${ens}'".nc   ${out_full}/CAM "                       >> ${job_i}
    echo "      cp ${CLM_ics_1}"'${ens}'".nc   ${out_full}/CLM "                       >> ${job_i}
-   echo "      cp ${ICE_ics_1}"'${ens}'".tar  ${out_full}/ICE "                       >> ${job_i}
+   echo "      cp ${ICE_ics_1}"'${ens}'".nc   ${out_full}/ICE "                       >> ${job_i}
 #   echo "      cp $ICE_ics_1:h/"'*-${ens}\.cice\.r\.volpn\.[0-9]*'" ${out_full}/ICE " >> ${job_i}
    echo "      @ ens++  "                                                             >> ${job_i}
    echo "   end  "                                                                    >> ${job_i}

