[Dart-dev] [3384] DART/trunk/models/MITgcm_ocean:
I was able to successfully assimilate two successive days.
nancy at ucar.edu
nancy at ucar.edu
Thu May 22 17:58:59 MDT 2008
An HTML attachment was scrubbed...
URL: http://mailman.ucar.edu/pipermail/dart-dev/attachments/20080522/b3da8296/attachment.html
-------------- next part --------------
Modified: DART/trunk/models/MITgcm_ocean/model_mod.f90
===================================================================
--- DART/trunk/models/MITgcm_ocean/model_mod.f90 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/model_mod.f90 2008-05-22 23:58:59 UTC (rev 3384)
@@ -372,7 +372,6 @@
if ((deltaTmom == deltaTtracer) .and. &
(deltaTmom == deltaTClock ) .and. &
(deltaTClock == deltaTtracer)) then
- model_timestep = set_time(86400, 0) ! works with deltaTmom > 86400
timestep = deltaTmom ! need a time_type version
else
write(msgstring,*)"namelist PARM03 has deltaTmom /= deltaTtracer /= deltaTClock"
@@ -383,7 +382,8 @@
call error_handler(E_ERR,"static_init_model", msgstring, source, revision, revdate)
endif
-model_time = timestep_to_DARTtime(timestepcount)
+model_timestep = set_time(nint(endTime), 0) ! works with deltaTmom > 86400
+model_time = timestep_to_DARTtime(timestepcount)
! Grid-related variables are in PARM04
delX(:) = 0.0_r4
Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/advance_model.csh 2008-05-22 23:58:59 UTC (rev 3384)
@@ -1,4 +1,4 @@
-#!/bin/tcsh
+#!/bin/tcsh -v
#
# Data Assimilation Research Testbed -- DART
# Copyright 2004-2007, Data Assimilation Research Section
@@ -41,14 +41,15 @@
# Copy the namelist files - these are small, so we really copy them
foreach FILE ( data data.cal data.exf data.kpp \
data.obcs data.pkg eedata )
- cp -pv ../inputs/$FILE .
+ cp -pv ../inputs/$FILE . || exit 1
end
# copy the files used by data&PARM05 - input datasets
# These get overwritten ... so maybe we don't actually copy them
-foreach FILE ( bathymetry.bin gom_H_199601.bin gom_S_199601.bin \
- gom_T_199601.bin gom_U_199601.bin gom_V_199601.bin )
-# cp -pv ../inputs/$FILE .
+#foreach FILE ( bathymetry.bin gom_H_199601.bin gom_S_199601.bin \
+# gom_T_199601.bin gom_U_199601.bin gom_V_199601.bin )
+foreach FILE ( bathymetry.bin )
+ cp -pv ../inputs/$FILE . || exit 2
end
# link the files used by data.exf&EXF_NML_02 - external forcings
@@ -63,7 +64,7 @@
ncep_shum_19960101.bin \
ncep_uwnd_19960101.bin \
ncep_vwnd_19960101.bin )
- ln -sf ../inputs/$FILE .
+ ln -sf ../inputs/$FILE . || exit 3
end
# link the files used by data.obcs&OBCS_PARM01 - open boundaries
@@ -103,46 +104,42 @@
# data.cal.new ... which contains the appropriate startdate_1, startdate_2
# so data&PARM05 will specify the input data files.
- mv -v ../$input_file assim_model_state_ic
+ mv -v ../$input_file assim_model_state_ic || exit 4
../trans_sv_pv
# Update the MIT namelist output ...
# and rename the input files to those defined in the data&PARM05 namelist.
- # FIXME - somehow the last ensemble member needs to copy the
- # data.cal.new back to the CENTRALDIR ... I think
- mv data.cal.new data.cal
-
set FNAME = `grep -i hydrogSaltFile data | sed -e "s#=##"`
set FNAME = `echo $FNAME | sed -e "s#hydrogSaltFile##"`
set FNAME = `echo $FNAME | sed -e "s#,##g"`
set FNAME = `echo $FNAME | sed -e "s#'##g"`
- mv -v S.*.*.data $FNAME
+ mv -v S.*.*.data $FNAME || exit 5
set FNAME = `grep -i hydrogThetaFile data | sed -e "s#=##"`
set FNAME = `echo $FNAME | sed -e "s#hydrogThetaFile##"`
set FNAME = `echo $FNAME | sed -e "s#,##g"`
set FNAME = `echo $FNAME | sed -e "s#'##g"`
- mv -v T.*.*.data $FNAME
+ mv -v T.*.*.data $FNAME || exit 5
set FNAME = `grep -i uVelInitFile data | sed -e "s#=##"`
set FNAME = `echo $FNAME | sed -e "s#uVelInitFile##"`
set FNAME = `echo $FNAME | sed -e "s#,##g"`
set FNAME = `echo $FNAME | sed -e "s#'##g"`
- mv -v U.*.*.data $FNAME
+ mv -v U.*.*.data $FNAME || exit 5
set FNAME = `grep -i vVelInitFile data | sed -e "s#=##"`
set FNAME = `echo $FNAME | sed -e "s#vVelInitFile##"`
set FNAME = `echo $FNAME | sed -e "s#,##g"`
set FNAME = `echo $FNAME | sed -e "s#'##g"`
- mv -v V.*.*.data $FNAME
+ mv -v V.*.*.data $FNAME || exit 5
set FNAME = `grep -i pSurfInitFile data | sed -e "s#=##"`
set FNAME = `echo $FNAME | sed -e "s#pSurfInitFile##"`
set FNAME = `echo $FNAME | sed -e "s#,##g"`
set FNAME = `echo $FNAME | sed -e "s#'##g"`
- mv -v Eta.*.*.data $FNAME
+ mv -v Eta.*.*.data $FNAME || exit 5
# set pattern="s/.*'\(.*\)'.*/\1/"
# set zfilename=`echo $zoneline | sed -e $pattern`
@@ -161,6 +158,11 @@
# Advance the model saving standard out
mpirun.lsf ../mitgcmuv_20p
+ # Remove the snapshot file at time zero.
+ # We are interested in the snapshot file at the end of the advance.
+ # Daily advances ... with a timestep of 900 seconds 86400/900 = 96
+ rm *.0000000000.*
+
# Extract the timestep from the ocean model output files.
set TIMESTEP = `ls -1 S.*.data`
set TIMESTEP = $TIMESTEP:r
@@ -169,7 +171,7 @@
echo $TIMESTEP | ../trans_pv_sv
# Move the updated state vector back to 'centraldir'
- mv assim_model_state_ud ../$output_file
+ mv assim_model_state_ud ../$output_file || exit 5
@ state_copy++
@ ensemble_member_line = $ensemble_member_line + 3
@@ -177,10 +179,11 @@
@ output_file_line = $output_file_line + 3
end
+mv data.cal.new ../data.cal
+
# Change back to original directory and get rid of temporary directory
-ls -l > ../directory_contents_$ensemble_member
cd ..
-\rm -rf $temp_dir
+# \rm -rf $temp_dir
# Remove the filter_control file to signal completion
# Is there a need for any sleeps to avoid trouble on completing moves here?
Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/job.simple.csh 2008-05-22 23:58:59 UTC (rev 3384)
@@ -111,8 +111,8 @@
${COPY} ${DARTMITDIR}/work/obs_seq.out .
${COPY} ${DARTMITDIR}/work/filter_ics .
${COPY} ${DARTMITDIR}/work/input.nml .
-cp -pv inputs/data .
-cp -pv inputs/data.cal .
+${COPY} inputs/data .
+${COPY} inputs/data.cal .
#-----------------------------------------------------------------------------
# Ensure the (output) experiment directory exists
Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/runme_filter 2008-05-22 23:58:59 UTC (rev 3384)
@@ -31,7 +31,7 @@
#BSUB -q dedicated
#BSUB -n 20
#BXXX -P 868500xx
-#BSUB -W 12:00
+#BSUB -W 2:00
#BSUB -N -u ${USER}@ucar.edu
#
##=============================================================================
@@ -98,10 +98,10 @@
# running in parallel. then it runs wakeup_filter to wake
# up filter so it can continue.
- rm -f model_to_filter.lock filter_to_model.lock
- mkfifo model_to_filter.lock filter_to_model.lock
+ \rm -f model_to_filter.lock filter_to_model.lock
+ mkfifo model_to_filter.lock filter_to_model.lock
- set filterhome = ~/.filter
+ set filterhome = ~/.filter$$
if ( ! -e $filterhome) mkdir $filterhome
# this starts filter but also returns control back to
Modified: DART/trunk/models/MITgcm_ocean/shell_scripts/runmodel_1x
===================================================================
--- DART/trunk/models/MITgcm_ocean/shell_scripts/runmodel_1x 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/shell_scripts/runmodel_1x 2008-05-22 23:58:59 UTC (rev 3384)
@@ -1,4 +1,4 @@
-#!/bin/tcsh
+#!/bin/tcsh
#
# Data Assimilation Research Testbed -- DART
# Copyright 2004-2008, Data Assimilation Research Section,
@@ -20,12 +20,13 @@
# -q queue cheapest == [standby, economy, (regular,debug), premium] == $$$$
# -n number of processors (really)
##=============================================================================
-#BSUB -J mitgcmuv
-#BSUB -o mitgcmuv.%J.log
-#BSUB -q regular
+#BSUB -J testrun
+#BSUB -o testrun.%J.log
+#BSUB -q dedicated
+#BSUB -N -u ${USER}@ucar.edu
#BSUB -n 20
+#BSUB -W 12:00
#BXXX -P nnnnnnnn
-#BSUB -W 12:00
#
##=============================================================================
## This block of directives constitutes the preamble for the PBS queuing system
@@ -45,10 +46,10 @@
## on the node with another job, so you might as well use
## them both. (ppn == Processors Per Node)
##=============================================================================
-#PBS -N mitgcmuv
+#PBS -N testrun
#PBS -r n
-#PBS -e mitgcmuv.err
-#PBS -o mitgcmuv.log
+#PBS -e testrun.err
+#PBS -o testrun.log
#PBS -q medium
#PBS -l nodes=10:ppn=2
@@ -61,14 +62,25 @@
if ($?LS_SUBCWD) then
# LSF has a list of processors already in a variable (LSB_HOSTS)
-
- mpirun.lsf ./mitgcmuv
+
+ setenv JOBNAME $LSB_OUTPUTFILE:ar
+ setenv CENTRALDIR $LS_SUBCWD
+ setenv TMPDIR /ptmp/${user}
+endif
+ setenv SRCDIR /fs/image/home/${user}/SVN/DART/models/MITgcm_ocean
+
+ set advance_command = 'mpirun.lsf ./mitgcmuv_20p'
else if ($?PBS_O_WORKDIR) then
+ setenv JOBNAME $PBS_JOBNAME
+ setenv CENTRALDIR $PBS_O_WORKDIR
+ setenv TMPDIR /ptmp/${user}
+ setenv SRCDIR /fs/image/home/${user}/SVN/DART/models/MITgcm_ocean
+
# PBS has a list of processors in a file whose name is (PBS_NODEFILE)
- mpirun ./mitgcmuv
+ set advance_command = 'mpirun ./mitgcmuv_20p'
else if ($?MYNODEFILE) then
@@ -91,7 +103,12 @@
setenv NUM_PROCS 8
echo "running with $NUM_PROCS nodes specified from $MYNODEFILE"
- mpirun -np $NUM_PROCS -nolocal -machinefile $MYNODEFILE ./mitgcmuv
+ setenv JOBNAME Oceans
+ setenv CENTRALDIR `pwd`
+ setenv TMPDIR /ptmp/${user}
+ setenv SRCDIR /fs/image/home/${user}/SVN/DART/models/MITgcm_ocean
+
+ set advance_command = "mpirun -np $NUM_PROCS -nolocal -machinefile $MYNODEFILE ./mitgcmuv_20p"
else
@@ -99,8 +116,56 @@
# already run 'lamboot' once to start the lam server, or that you
# are running with a machine that has mpich installed.
- echo "running interactively"
- mpirun -np 2 ./mitgcmuv
+ echo "running interactively ... NOT"
+ # mpirun -np 2 ./mitgcmuv_20p
endif
+mkdir -p ${TMPDIR}/${JOBNAME}
+cd ${TMPDIR}/${JOBNAME}
+
+# Copy the namelist files - these are small, so we really copy them
+foreach FILE ( data data.cal data.exf data.kpp \
+ data.obcs data.pkg eedata )
+ cp -p ${SRCDIR}/inputs/$FILE .
+end
+
+# link the files used by data&PARM05 - input datasets
+foreach FILE ( bathymetry.bin gom_H_199601.bin gom_S_199601.bin \
+ gom_T_199601.bin gom_U_199601.bin gom_V_199601.bin )
+ ln -sf ${SRCDIR}/inputs/$FILE .
+end
+
+# link the files used by data.exf&EXF_NML_02 - external forcings
+foreach FILE ( lev05_monthly_sss_relax.bin \
+ lev05_monthly_sst_relax.bin \
+ run-off.bin_1x1 \
+ ncep_air_19960101.bin \
+ ncep_dlwrf_19960101.bin \
+ ncep_dswrf_19960101.bin \
+ ncep_nswrs_19960101.bin \
+ ncep_prate_19960101.bin \
+ ncep_shum_19960101.bin \
+ ncep_uwnd_19960101.bin \
+ ncep_vwnd_19960101.bin )
+ ln -sf ${SRCDIR}/inputs/$FILE .
+end
+
+# link the files used by data.obcs&OBCS_PARM01 - open boundaries
+foreach FILE ( Rs_SobcsE_52_01_nPx1.bin Rs_SobcsN_52_01_nPy1.bin \
+ Rs_TobcsE_52_01_nPx1.bin Rs_TobcsN_52_01_nPy1.bin \
+ Rs_UobcsE_52_01_nPx1_c1.bin Rs_UobcsN_52_01_nPy1.bin \
+ Rs_VobcsE_52_01_nPx1.bin Rs_VobcsN_52_01_nPy1_c1.bin)
+ ln -sf ${SRCDIR}/inputs/$FILE .
+end
+
+cp -p ${SRCDIR}/work/mitgcmuv_20p .
+
+eval ${advance_command}
+
+set OUTPUTDIR = ${CENTRALDIR}/work/${JOBNAME}
+mkdir -p ${OUTPUTDIR}
+mv -f *.data *.meta STD* ${OUTPUTDIR}
+
+ls -al
+
Modified: DART/trunk/models/MITgcm_ocean/work/input.nml
===================================================================
--- DART/trunk/models/MITgcm_ocean/work/input.nml 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/work/input.nml 2008-05-22 23:58:59 UTC (rev 3384)
@@ -1,7 +1,7 @@
&perfect_model_obs_nml
start_from_restart = .true.,
output_restart = .true.,
- async = 0,
+ async = 4,
init_time_days = -1,
init_time_seconds = -1,
first_obs_days = -1,
@@ -12,27 +12,27 @@
restart_in_file_name = "perfect_ics",
restart_out_file_name = "perfect_restart",
obs_seq_in_file_name = "obs_seq.in",
- obs_seq_out_file_name = "obs_seq.out",
+ obs_seq_out_file_name = "obs_seq.perfect",
adv_ens_command = "./advance_model.csh" /
&filter_nml
- async = 0,
+ async = 4,
adv_ens_command = "./advance_model.csh",
- ens_size = 4,
- start_from_restart = .false.,
+ ens_size = 20,
+ start_from_restart = .true.,
output_restart = .true.,
obs_sequence_in_name = "obs_seq.out",
obs_sequence_out_name = "obs_seq.final",
- restart_in_file_name = "perfect_ics",
+ restart_in_file_name = "filter_ics",
restart_out_file_name = "filter_restart",
- init_time_days = -1,
- init_time_seconds = -1,
- first_obs_days = -1,
- first_obs_seconds = -1,
- last_obs_days = -1,
- last_obs_seconds = -1,
+ init_time_days = 144270,
+ init_time_seconds = 43200,
+ first_obs_days = 144270,
+ first_obs_seconds = 0,
+ last_obs_days = 144271,
+ last_obs_seconds = 86400,
num_output_state_members = 4,
- num_output_obs_members = 0,
+ num_output_obs_members = 20,
output_interval = 1,
num_groups = 1,
input_qc_threshold = 4.0,
@@ -113,7 +113,6 @@
model_perturbation_amplitude = 0.2,
output_state_vector = .false. /
-
&location_nml
horiz_dist_only = .true.,
vert_normalization_pressure = 100000.0,
@@ -160,23 +159,23 @@
&obs_diag_nml
obs_sequence_name = 'obs_seq.final',
- first_bin_center = 1601, 1, 1, 0, 0, 0 ,
- last_bin_center = 1601, 1, 1, 3, 0, 0 ,
- bin_separation = 0, 0, 0, 0, 3, 0 ,
- bin_width = 0, 0, 0, 0, 3, 0 ,
+ first_bin_center = 1996, 1, 1,12, 0, 0 ,
+ last_bin_center = 1996, 1,14,12, 0, 0 ,
+ bin_separation = 0, 0, 1, 0, 0, 0 ,
+ bin_width = 0, 0, 1, 0, 0, 0 ,
time_to_skip = 0, 0, 0, 0, 0, 0 ,
max_num_bins = 1000,
rat_cri = 3.0,
input_qc_threshold = 4.0,
- Nregions = 4,
- lonlim1 = 0.0, 0.0, 0.0, 235.0,
- lonlim2 = 360.0, 360.0, 360.0, 295.0,
+ Nregions = 1,
+ lonlim1 = 250.0, 0.0, 0.0, 235.0,
+ lonlim2 = 300.0, 360.0, 360.0, 295.0,
latlim1 = 20.0, -80.0, -20.0, 25.0,
- latlim2 = 80.0, -20.0, 20.0, 55.0,
- reg_names = 'Northern Hemisphere', 'Southern Hemisphere', 'Tropics', 'North America',
+ latlim2 = 40.0, -20.0, 20.0, 55.0,
+ reg_names = 'Gulf of Mexico', 'Southern Hemisphere', 'Tropics', 'North America',
print_mismatched_locs = .false.,
- print_obs_locations = .true.,
- verbose = .false. /
+ print_obs_locations = .false.,
+ verbose = .true. /
&ocean_obs_nml
year = 1996,
Modified: DART/trunk/models/MITgcm_ocean/work/workshop_setup.csh
===================================================================
--- DART/trunk/models/MITgcm_ocean/work/workshop_setup.csh 2008-05-22 22:48:33 UTC (rev 3383)
+++ DART/trunk/models/MITgcm_ocean/work/workshop_setup.csh 2008-05-22 23:58:59 UTC (rev 3384)
@@ -92,8 +92,6 @@
endsw
end
-exit
-
#----------------------------------------------------------------------
# Build the MPI-enabled target(s)
#----------------------------------------------------------------------
More information about the Dart-dev
mailing list