[Dart-dev] [3291] DART/trunk/models: Initial commit of the
interface files to the
nancy at subversion.ucar.edu
Fri Apr 4 16:14:27 MDT 2008
-------------- next part --------------
Added: DART/trunk/models/am2/column_rand.f90
===================================================================
--- DART/trunk/models/am2/column_rand.f90 (rev 0)
+++ DART/trunk/models/am2/column_rand.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,160 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program column_rand
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: column_rand.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $
+
+! Allows creation of an input file for generating a set of randomly located
+! observation stations, each with a full column of obs, for the CAM model.
+
+use types_mod, only : r8, PI
+use utilities_mod, only : get_unit, initialize_utilities, finalize_utilities
+use random_seq_mod, only : random_seq_type, init_random_seq, random_uniform
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $"
+
+integer, allocatable :: levels(:)
+integer :: level, num_cols, num_levs, i, iunit
+real(r8) :: lat, lon, t_err_var, uv_err_var, ps_err_var, q_err_var
+type(random_seq_type) :: r
+
+! Initialize the utilities
+call initialize_utilities('Column_rand')
+
+! Initialize the random sequence
+call init_random_seq(r)
+
+! Open an output file and write header info
+iunit = get_unit()
+open(unit = iunit, file = 'cam_column_rand.out')
+
+write(*, *) 'input the number of columns per set'
+read(*, *) num_cols
+
+write(*, *) 'input the number of model levels in column'
+read(*, *) num_levs
+
+allocate(levels(num_levs))
+do i = 1, num_levs
+ write(*, *) 'Input vertical level ', i
+ read(*, *) levels(i)
+end do
+
+! Output the total number of obs in set; Q is being observed, too
+write(*, *) 'total num is ', num_cols * (num_levs * 4 + 1)
+write(iunit, *) num_cols * (num_levs * 4 + 1)
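+! (illustrative check: with, say, num_cols = 10 and num_levs = 26, this writes
+! 10 * (26 * 4 + 1) = 1050 obs -- T, U, V and Q at each level plus one ps per column)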
+
+! No copies or qc
+write(iunit, *) 0
+write(iunit, *) 0
+
+! First get error variance for surface pressure
+write(*, *) 'Input error VARIANCE for surface pressure obs'
+read(*, *) ps_err_var
+
+! Get error variance for t, and u and v
+write(*, *) 'Input error VARIANCE for T obs'
+read(*, *) t_err_var
+write(*, *) 'Input error VARIANCE for U and V obs'
+read(*, *) uv_err_var
+write(*, *) 'Input error VARIANCE for Q obs'
+read(*, *) q_err_var
+
+
+! Loop through each column
+do i = 1, num_cols
+
+ ! Get a random lon lat location for this column
+ ! Longitude is random from 0 to 360
+ lon = random_uniform(r) * 360.0_r8
+
+ ! Latitude must be area weighted
+ lat = asin(random_uniform(r) * 2.0_r8 - 1.0_r8)
+
+ ! Now convert from radians to degrees latitude
+ lat = lat * 360.0_r8 / (2.0_r8 * PI)
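+ ! (why this is area weighted: if u is uniform on [0,1] then sin(lat) = 2u - 1 is
+ ! uniform on [-1,1], and since the area element on the sphere goes as d(sin(lat)),
+ ! equal areas are sampled with equal probability; sampling lat itself uniformly
+ ! would over-sample the poles)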
+
+ ! Do ps ob
+ write(iunit, *) 0
+ ! Kind for surface pressure is 3
+ write(iunit, *) 3
+ write(iunit, *) 1
+ ! Level is -1 for ps
+ write(iunit, *) -1
+ write(iunit, *) lon
+ write(iunit, *) lat
+ write(iunit, *) 0, 0
+ write(iunit, *) ps_err_var
+
+ ! Loop through each observation in the column
+ do level = 1, num_levs
+
+ ! Write out the t observation
+ write(iunit, *) 0
+ ! Kind for t is 4
+ write(iunit, *) 4
+ write(iunit, *) 1
+ write(iunit, *) levels(level)
+ write(iunit, *) lon
+ write(iunit, *) lat
+ write(iunit, *) 0, 0
+ write(iunit, *) t_err_var
+
+
+ ! Write out the u observation
+ write(iunit, *) 0
+ ! Kind for u is 1
+ write(iunit, *) 1
+ write(iunit, *) 1
+ write(iunit, *) levels(level)
+ write(iunit, *) lon
+ write(iunit, *) lat
+ write(iunit, *) 0, 0
+ write(iunit, *) uv_err_var
+
+
+ ! Write out the v observation
+ write(iunit, *) 0
+ ! Kind for v is 2
+ write(iunit, *) 2
+ write(iunit, *) 1
+ write(iunit, *) levels(level)
+ write(iunit, *) lon
+ write(iunit, *) lat
+ write(iunit, *) 0, 0
+ write(iunit, *) uv_err_var
+
+
+ ! Write out the q observation
+ write(iunit, *) 0
+ ! Kind for q is 5
+ write(iunit, *) 5
+ write(iunit, *) 1
+ write(iunit, *) levels(level)
+ write(iunit, *) lon
+ write(iunit, *) lat
+ write(iunit, *) 0, 0
+ write(iunit, *) q_err_var
+
+ end do
+end do
+
+write(iunit, *) 'set_def.out'
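+! (the trailing file name matches the final prompt answer expected by
+! create_obs_sequence; like the other DART *_rand helpers, this output is
+! presumably meant to be redirected into that program as its interactive input)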
+
+! Shut down the utilities cleanly
+call finalize_utilities()
+
+end program column_rand
Property changes on: DART/trunk/models/am2/column_rand.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/model_mod.f90
===================================================================
--- DART/trunk/models/am2/model_mod.f90 (rev 0)
+++ DART/trunk/models/am2/model_mod.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,1506 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+module model_mod
+
+ ! <next few lines under version control, do not edit>
+ ! $URL$
+ ! $Id: $
+ ! $Revision$
+ ! $Date: $
+
+ !----------------------------------------------------------------------
+ ! purpose: interface between AM2 and DART
+ ! Translate to/from state_vector and restart file(s)
+ ! Initialize model
+ ! Write out model fields to Prior and Posterior_Diag.nc,
+ ! Generate expected obs from model state (model_interpolate)
+ ! Find state variables (or obs) that are close to a given base observation.
+ ! (get_close_obs)
+ ! author: Robert Pincus, CIRES/NOAA ESRL PSD1
+ !==============================================================================================
+ use netcdf
+ use types_mod, only : r8
+ use time_manager_mod, only : time_type, set_time, print_time, set_calendar_type, GREGORIAN
+ use utilities_mod, only : open_file, close_file, find_namelist_in_file, check_namelist_read, &
+ register_module, error_handler, file_exist, E_ERR, E_WARN, E_MSG, &
+ logfileunit, do_output, nc_check
+ use mpi_utilities_mod, only : my_task_id, task_count
+ use location_mod, only : location_type, get_close_maxdist_init, &
+ get_close_obs_init, get_close_obs, set_location, &
+ get_location, vert_is_level, vert_is_pressure
+ use obs_kind_mod, only : KIND_U_WIND_COMPONENT, KIND_V_WIND_COMPONENT, &
+ KIND_SURFACE_PRESSURE, KIND_TEMPERATURE, &
+ KIND_SPECIFIC_HUMIDITY, KIND_PRESSURE, &
+ KIND_CLOUD_LIQUID_WATER, KIND_CLOUD_ICE, &
+ ! We'll need to add a kind_cloud_fraction to correspond to AM2 prog var
+ get_obs_kind_index, get_obs_kind_var_type
+ use location_mod, only: VERTISSURFACE, VERTISLEVEL
+
+ implicit none
+ private
+
+ !
+ ! Standard DART interface
+ !
+ public :: model_type, &
+ get_model_size, &
+ adv_1step, &
+ get_state_meta_data, &
+ model_interpolate, &
+ get_model_time_step, &
+ end_model, &
+ static_init_model, &
+ init_time, &
+ init_conditions, &
+ nc_write_model_atts, &
+ nc_write_model_vars, &
+ pert_model_state, &
+ get_close_maxdist_init, &
+ get_close_obs_init, &
+ get_close_obs, &
+ ens_mean_for_model
+ !
+ ! Procedures from CAM model_mod used in CAM utilities
+ !
+ public :: prog_var_to_vector, &
+ vector_to_prog_var, &
+ init_model_instance, &
+ end_model_instance, &
+ read_model_init, &
+ write_model_init
+ !==============================================================================================
+ !
+ ! Global declarations
+ !
+
+ type model_type
+ !
+ ! "Prognostic variables"
+ ! Surface pressure may be computed from pressure thickness (delp) on the fly
+ !
+ real, pointer, dimension(:, :) :: ps => null()
+ real, pointer, dimension(:, :, :) :: u => null(), v => null(), T => null(), delp => null()
+ ! Tracers; could generalize to 2D and 3D collections
+ real, pointer, dimension(:, :, :, :) :: tracers => null()
+ real, pointer, dimension(:) :: tracerTypes => null() ! Dart obs type associated with each tracer
+ end type model_type
+
+ !----------------------------------------------------------------------
+ ! Namelist variables with default values follow
+
+ !----------------------------------------------------------------------
+ integer, parameter :: maxnum_tracers = 10
+ character(len = nf90_max_name), dimension(maxnum_tracers) :: &
+ tracer_names = '', tracer_files = '', tracer_obs_kind_names = "", tracer_config_files = ''
+ integer, dimension(maxnum_tracers) :: tracer_obs_kinds = -1
+
+ ! output_state_vector = .true. results in a "state-vector" netCDF file
+ ! output_state_vector = .false. results in a "prognostic-var" netCDF file
+ logical :: output_state_vector = .false.
+
+ ! File where basic info about model configuration can be found
+ character(len = nf90_max_name) :: model_config_file = 'fv_rst.res.nc', &
+ model_restart_file = 'fv_rst.res.nc'
+
+ ! Define location restrictions on which observations are assimilated
+ ! (values are calculated anyway, but istatus is set to 2)
+ real(r8) :: max_obs_lat_degree = 90.0_r8
+ real(r8) :: highest_obs_pressure_mb = 150.0_r8
+ real(r8) :: highest_state_pressure_mb = 150.0_r8
+
+ ! Specify shortest time step that the model will support
+ ! This may be limited below by the model itself
+ integer :: Time_step_seconds = 21600, Time_step_days = 0
+
+ namelist /model_nml/ &
+ tracer_names, tracer_files, tracer_config_files, &
+ tracer_obs_kind_names, output_state_vector, &
+ model_config_file, model_restart_file, &
+ highest_obs_pressure_mb, highest_state_pressure_mb, max_obs_lat_degree, &
+ Time_step_seconds, Time_step_days
+ !----------------------------------------------------------------------
+ !
+ ! Useful global storage
+ !
+
+ type(time_type) :: Time_step_atmos
+ !
+ ! Model top pressure is used for converting pressure thickness delp to surface pressure
+ ! It should ideally be available in phalf but the restart files I have don't have
+ ! valid values in that dimension variable.
+ ! The dimension variables have units of mb but delp has units of Pa.
+ !
+ real(kind = r8), parameter :: mb_to_pa = 100., model_top_pressure = 1. * mb_to_pa
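+ ! (so a column's surface pressure can be recovered as
+ ! ps = model_top_pressure + sum(delp) over the levels,
+ ! which is how prog_var_to_vector reconstructs ps below)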
+
+ integer :: num_tracers, num_lats, num_lons, num_levels, model_size
+
+ !
+ ! We assume here that all 2D prognostic variables are functions of lat, lon
+ !
+ integer, parameter :: num_3d_prog_vars = 3, num_2d_prog_vars = 1
+ character(len = 1), dimension(num_3d_prog_vars), parameter :: &
+ names_3d_prog_vars = (/ "U", "V", "T" /)
+ character(len = 2), dimension(num_2d_prog_vars), parameter :: &
+ names_2d_prog_vars = (/ "PS" /)
+ integer, dimension(num_3d_prog_vars), parameter :: &
+ kinds_3d_prog_vars = (/ KIND_U_WIND_COMPONENT, KIND_V_WIND_COMPONENT, KIND_TEMPERATURE /)
+ integer, dimension(num_2d_prog_vars), parameter :: &
+ kinds_2d_prog_vars = (/ KIND_SURFACE_PRESSURE /)
+
+ !
+ ! Information related to dimensions (coordinates)
+ !
+ integer, parameter :: num_dims = 6
+ character(len = 5), dimension(num_dims), parameter :: &
+ dim_names = (/ 'lat  ', 'latu ', 'lon  ', 'lonv ', 'pfull', 'phalf' /)
+ !
+ ! Dimension ids and dimension variable ids for each of the dimensions in the model_config_file
+ !
+ integer, dimension(num_dims) :: dim_lens, dim_var_ids
+ real, allocatable, dimension(:) :: lat, latu, lon, lonv, pfull, phalf
+
+ !
+ ! Surface geopotential and ak, bk terms, read from model_config_file,
+ ! used for pressure computations
+ !
+ real, dimension(:, :), allocatable :: surface_geopotential
+ real, dimension(:), allocatable :: ak, bk, akmid, bkmid
+ !-----------------------------------------------------------------------
+ ! version controlled file description for error handling, do not edit
+ character(len=128) :: version = "$Revision$"
+ character(len=128) :: tag = "$Id: model_mod.f90 3163 2007-12-07 20:39:48Z nancy $"
+ character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-12-07 15:39:48 -0500 (Fri, 07 Dec 2007) $"
+ !-----------------------------------------------------------------------
+
+contains
+ ! ----------------------------------------------------------------------------
+ !
+ ! Public procedures
+ !
+ ! ----------------------------------------------------------------------------
+
+ subroutine static_init_model()
+ !
+ ! Initializes class data for model
+ !
+
+ ! Local variables
+ !
+ integer :: iunit, io, ens_member, num_tasks, my_task
+ integer :: ncfileid, ncvarid, i, index
+ logical :: do_out
+ ! --------------------------------------------------------------------------
+
+ call register_module(source, revision, revdate)
+
+ ! Calendar information is not passed to the model; it must be set in the model namelist
+ call set_calendar_type(GREGORIAN)
+
+ call find_namelist_in_file("input.nml", "model_nml", iunit)
+ read(iunit, nml = model_nml, iostat = io)
+ call check_namelist_read(iunit, io, "model_nml")
+
+ ! set the printed output logical variable to reduce printed output;
+ ! depends on whether this is being called by trans_... (read ens member # from file 'element' )
+ ! or by filter (multiple processes, printout controlled by do_output())
+ if (file_exist('element')) then
+ ! debug; fix this ugliness
+ !
+ ! Inherited from CAM model_mod; "element" exists for all processes save ???
+ !
+ open(unit = 99, file='element', form = 'formatted')
+ read(99,*) ens_member
+ close(99)
+ do_out = .false.
+ if (ens_member == 1) do_out = .true.
+ else
+ do_out = do_output()
+ ! static_init_model is called once(?) for each task(?).
+ ! There may be more or fewer ensemble members than tasks.
+ ! No problem if there are fewer.
+ ! In pert_model_state generate a unique ens_member from my_task and globally stored info
+ ! about previous calls to pert_model_state.
+ num_tasks = task_count()
+ my_task = my_task_id()
+ end if
+
+ ! Record the namelist values
+ if (do_out) write(logfileunit, nml = model_nml)
+
+ ! Set the model minimum time step from the namelist seconds and days input
+ Time_step_atmos = set_time(Time_step_seconds, Time_step_days)
+ if (do_out) call print_time(Time_step_atmos)
+
+ !
+ ! Open config file; read number of and values of our six dimensions
+ ! Checking the tracers would require having a "config" file for each of the tracer fields
+ !
+ call nc_check(nf90_open(path = trim(model_config_file), mode = nf90_nowrite, ncid = ncfileid), &
+ 'static_init_model', 'opening '// trim(model_config_file))
+ call read_dimension_info(ncFileId)
+
+ !
+ ! Read in ak, bk, surface geopotential
+ ! The latter is a function of time but there's only one value of time
+ ! We could check the dimension order for an extra warm-and-fuzzy feeling
+ !
+ allocate(surface_geopotential(num_lons, num_lats), ak(num_levels + 1), bk(num_levels + 1), &
+ akmid(num_levels), bkmid(num_levels))
+
+ call nc_check(nf90_inq_varid(ncfileid, 'ak', ncvarid), 'static_init_model', 'looking for varid of ak')
+ call nc_check(nf90_get_var(ncfileid, ncvarid, ak), 'static_init_model', 'reading ak')
+
+ call nc_check(nf90_inq_varid(ncfileid, 'bk', ncvarid), 'static_init_model', 'looking for varid of bk')
+ call nc_check(nf90_get_var(ncfileid, ncvarid, bk), 'static_init_model', 'reading bk')
+
+ call nc_check(nf90_inq_varid(ncfileid, 'Surface_geopotential', ncvarid), &
+ 'static_init_model', 'looking for varid of Surface_geopotential')
+ call nc_check(nf90_get_var(ncfileid, ncvarid, surface_geopotential), &
+ 'static_init_model', 'reading Surface_geopotential')
+
+ !
+ ! Compute akmid, bkmid for computing pressure at layers
+ !
+ akmid(:num_levels) = ak(:num_levels) + (ak(2:) - ak(:num_levels))/2.
+ bkmid(:num_levels) = bk(:num_levels) + (bk(2:) - bk(:num_levels))/2.
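+ ! (given these, the pressure at layer midpoint k in a column with surface pressure ps
+ ! is taken to be akmid(k) + bkmid(k) * ps; model_interpolate uses exactly this when an
+ ! observation's vertical coordinate is pressure)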
+
+ !
+ ! Count the number of tracer names provided; check to be sure each tracer name
+ ! has a corresponding file to read the tracer from
+ !
+ num_tracers = count(len_trim(tracer_names) > 0)
+ if(count(len_trim(tracer_files) > 0) /= num_tracers) &
+ call error_handler(E_ERR, 'static_model_init', "Different number of model tracer names, tracer files provided", source, revision, revdate)
+ if(count(len_trim(tracer_obs_kind_names) > 0) /= num_tracers) &
+ call error_handler(E_ERR, 'static_model_init', "Different number of model tracer names, tracer kinds provided", source, revision, revdate)
+ do i = 1, num_tracers
+ index = get_obs_kind_index(tracer_obs_kind_names(i))
+ if(index > 0) then
+ tracer_obs_kinds(i) = get_obs_kind_var_type(index)
+ else
+ call error_handler(E_ERR, 'static_model_init', "Tracer type " // trim(tracer_obs_kind_names(i)) // " unknown" , source, revision, revdate)
+ end if
+ end do
+
+ model_size = num_2d_prog_vars * (num_lats * num_lons) + &
+ num_3d_prog_vars * (num_lats * num_lons * num_levels) + &
+ num_tracers * (num_lats * num_lons * num_levels)
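+
+ ! Illustrative layout of the state vector implied by the packing order used below
+ ! (ps first, then the 3D fields level-fastest, then one block per tracer):
+ ! ps : 1 .. num_lons*num_lats
+ ! u, v, T : three blocks of num_levels*num_lons*num_lats entries each
+ ! tracers : num_tracers further blocks of num_levels*num_lons*num_lats entries
+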
+
+ call nc_check(nf90_close(ncfileid), &
+ 'static_init_model', 'closing '// trim(model_config_file))
+ end subroutine static_init_model
+
+ ! ----------------------------------------------------------------------------
+
+ integer function get_model_size()
+ get_model_size = model_size
+ end function get_model_size
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine get_state_meta_data(index_in, location, var_type)
+ integer, intent(in) :: index_in
+ type(location_type), intent(out) :: location
+ integer, intent(out), optional :: var_type
+ !------------------------------------------------------------------
+ !
+ ! Given an integer index into the state vector structure, returns the
+ ! associated location and DART observation kind
+
+ integer :: i, start, finish, field_number, local_type, &
+ local_index, lat_index, lon_index, level_index, &
+ which_vert
+ real(r8) :: local_lat, local_lon, vert_loc
+
+
+ start = 1; field_number = 0; local_index = 0
+
+ !
+ ! Figure out which field the index points to and the index within that field
+ ! Walk through each field, compute its start and end point, and see if index_in
+ ! lies between them.
+ ! Assign the observation type while we're at it.
+ !
+ do i = 1, num_2d_prog_vars
+ finish = start + (num_lons * num_lats) - 1
+ if(index_in >= start .and. index_in <= finish) then
+ field_number = i
+ local_type = kinds_2d_prog_vars(i)
+ local_index = index_in - start + 1
+ exit
+ end if
+ start = finish + 1
+ end do
+
+ if(field_number <= 0) then
+ do i = 1, num_3d_prog_vars
+ finish = start + (num_levels * num_lons * num_lats) - 1
+ if(index_in >= start .and. index_in <= finish) then
+ field_number = num_2d_prog_vars + i
+ local_type = kinds_3d_prog_vars(i)
+ local_index = index_in - start + 1
+ exit
+ end if
+ start = finish + 1
+ end do
+ end if
+
+ if(field_number <= 0) then
+ do i = 1, num_tracers
+ finish = start + (num_levels * num_lons * num_lats) - 1
+ if(index_in >= start .and. index_in <= finish) then
+ field_number = num_2d_prog_vars + num_3d_prog_vars + i
+ local_type = tracer_obs_kinds(i)
+ local_index = index_in - start + 1
+ exit
+ end if
+ start = finish + 1
+ end do
+ end if
+
+ ! Should we check here to ensure we've found a match?
+
+ !
+ ! Compute the x, y, z index. The state vector is stored in order level, lon, lat
+ ! (or lon, lat for 2D fields)
+ !
+ if(field_number <= num_2d_prog_vars) then
+ lat_index = (local_index-1)/num_lons + 1
+ lon_index = mod(local_index-1, num_lons) + 1
+ level_index = 0
+ which_vert = VERTISSURFACE
+ else
+ lat_index = (local_index-1)/(num_lons * num_levels) + 1
+ local_index = mod(local_index-1, num_lons * num_levels) + 1
+ lon_index = (local_index-1)/num_levels + 1
+ level_index = mod(local_index-1, num_levels) + 1
+ which_vert = VERTISLEVEL
+ end if
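+ ! (worked example with hypothetical sizes num_levels = 3, num_lons = 4: a 3D
+ ! local_index of 7 decomposes to lat_index = 1, lon_index = 3, level_index = 1)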
+ if(lat_index < 0 .or. lat_index > num_lats) &
+ call error_handler(E_ERR,'get_state_meta_data', "Lat calculation out of bounds", source, revision, revdate)
+ if(lon_index < 0 .or. lon_index > num_lons) &
+ call error_handler(E_ERR,'get_state_meta_data', "Lon calculation out of bounds", source, revision, revdate)
+ !
+ ! Convert indexes to lat, lon, level values
+ !
+ local_lat = lat(lat_index); if(local_type == KIND_U_WIND_COMPONENT) local_lat = latu(lat_index)
+ local_lon = lon(lon_index); if(local_type == KIND_V_WIND_COMPONENT) local_lon = lonv(lon_index)
+
+ location = set_location(local_lon, local_lat, real(level_index, r8), which_vert)
+ if(present(var_type)) var_type = local_type
+ end subroutine get_state_meta_data
+
+ ! ----------------------------------------------------------------------------
+
+ function nc_write_model_atts( ncFileID ) result (ierr)
+ ! Writes model-specific attributes to a netCDF file.
+ ! TJH Fri Aug 29 MDT 2003
+ ! Modified for AM2 by Robert Pincus, Dec. 2007
+ !
+ integer, intent(in) :: ncFileID ! netCDF file identifier
+ integer :: ierr ! return value of function
+ !-----------------------------------------------------------------------------------------
+ integer :: unlimitedDimID
+ integer :: MemberDimID, StateVarDimID, TimeDimID, & ! ScalarDimID, &
+ LatDimId, LatuDimId, LonDimId, LonvDimId, pfullDimId, phalfDimId
+ integer :: StateVarID, StateVarVarID
+ integer :: configFileId, configVarId, diagVarId, dim
+ integer :: i, tracer, trcrFileId
+ character(len=129) :: errstring
+ character(len=8) :: crdate ! needed by F90 DATE_AND_TIME intrinsic
+ character(len=10) :: crtime ! needed by F90 DATE_AND_TIME intrinsic
+ character(len=5) :: crzone ! needed by F90 DATE_AND_TIME intrinsic
+ integer, dimension(8) :: values ! needed by F90 DATE_AND_TIME intrinsic
+ character(len=NF90_MAX_NAME) :: str1
+ !-------------------------------------------------------------------------------
+
+ ierr = -1 ! assume it's not going to work
+
+ !
+ ! Make sure ncFileID refers to an open netCDF file, and then put into define mode.
+ ! More dimensions, variables and attributes will be added in this routine.
+ !
+ write(errstring,'(I4)') ncFileID
+ call nc_check(nf90_Inquire(ncFileID, unlimitedDimID = unlimitedDimID), &
+ 'nc_write_model_atts', 'Inquire '// trim(errstring))
+ call nc_check(nf90_Redef(ncFileID), 'nc_write_model_atts', 'Redef '// trim(errstring))
+
+ !
+ ! Write DART rev info and current date & time as global attributes
+ !
+ call DATE_AND_TIME(crdate,crtime,crzone,values)
+ write(str1,'("YYYY MM DD HH MM SS = ",i4, 5(1x,i2.2))') &
+ values(1), values(2), values(3), values(5), values(6), values(7)
+
+ call nc_check(nf90_put_att(ncFileID, NF90_GLOBAL, "creation_date",str1), &
+ 'nc_write_model_atts', 'put_att creation_date'//trim(str1))
+ call nc_check(nf90_put_att(ncFileID, NF90_GLOBAL, "model_revision",revision), &
+ 'nc_write_model_atts', 'put_att model_revision'//trim(revision))
+ call nc_check(nf90_put_att(ncFileID, NF90_GLOBAL, "model_revdate",revdate), &
+ 'nc_write_model_atts', 'put_att model_revdate'//trim(revdate))
+ call nc_check(nf90_put_att(ncFileID, NF90_GLOBAL, "model","AM2"), &
+ 'nc_write_model_atts','put_att model AM2')
+
+ !
+ ! Dimension ids for existing dimensions so we can define variables properly
+ !
+ call nc_check(nf90_inq_dimid(ncid = ncFileID, name = "copy", dimid = MemberDimID), &
+ 'nc_write_model_atts', 'inq_dimid copy')
+ call nc_check(nf90_inq_dimid(ncid = ncFileID, name = "time", dimid = TimeDimID), &
+ 'nc_write_model_atts', 'inq_dimid time')
+ if ( TimeDimID /= unlimitedDimId ) then
+ write(errstring, *)'Time dimension ID ', TimeDimID, 'must match Unlimited Dimension ID ', unlimitedDimId
+ call error_handler(E_ERR,'nc_write_model_atts', errstring, source, revision, revdate)
+ end if
+
+ !
+ ! Define the new dimensions IDs
+ !
+
+ if ( output_state_vector ) then
+ !
+ ! Create a dimension that corresponds to the length of the state vector
+ !
+ call nc_check(nf90_def_dim(ncid=ncFileID, name="StateVariable", &
+ len=model_size, dimid = StateVarDimID), &
+ 'nc_write_model_atts', 'def_dim StateVariable')
+ !
+ ! Define the state vector coordinate variable and its attributes
+ !
+ call nc_check(nf90_def_var(ncid=ncFileID, name="StateVariable", xtype=nf90_int, &
+ dimids=StateVarDimID, varid=StateVarVarID), &
+ 'nc_write_model_atts','def_var state vector')
+ call nc_check(nf90_put_att(ncFileID, StateVarVarID, "long_name", "State Variable ID"), &
+ 'nc_write_model_atts','put_att long_name state vector ')
+ call nc_check(nf90_put_att(ncFileID, StateVarVarID, "units", "indexical"), &
+ 'nc_write_model_atts','put_att units state vector ' )
+ call nc_check(nf90_put_att(ncFileID, StateVarVarID, "valid_range", (/ 1, model_size /)), &
+ 'nc_write_model_atts','put_att valid range state vector ')
+ else
+ !
+ ! We'll copy the dimension definitions (including all the attributes) for the num_dims
+ ! dimensions we're using from the config file into the diagnostics file.
+ ! Variable dim_names holds the dimension names
+ ! We could also do this by looping over all the dimensions in the config file, skipping time
+ !
+ call nc_check(nf90_open(path = trim(model_config_file), mode = nf90_nowrite, ncid = configFileId), &
+ 'nc_write_model_atts', 'opening '// trim(model_config_file) )
+ do dim = 1, num_dims
+ call copy_dim_var_pair(ncFileId, trim(dim_names(dim)), dim_lens(dim), configFileId, 'nc_write_model_atts')
+ end do
+ end if
+
+ !
+ ! Create the variables and their attributes
+ !
+ if ( output_state_vector ) then
+ !
+ ! Define the state vector and its attributes
+ !
+ call nc_check(nf90_def_var(ncid=ncFileID, name="state", xtype=nf90_real, &
+ dimids = (/ StateVarDimID, MemberDimID, unlimitedDimID /), varid=StateVarID), &
+ 'nc_write_model_atts','def_var state vector')
+ call nc_check(nf90_put_att(ncFileID, StateVarID, "long_name", "model state or fcopy"), &
+ 'nc_write_model_atts','put_att long_name model state or fcopy ')
+ call nc_check(nf90_put_att(ncFileID, StateVarId, "vector_to_prog_var","AM2"), &
+ 'nc_write_model_atts','put_att vector_to_prog_var AM2 ')
+ else
+ !
+ ! We'll need all the dimension ids except phalf
+ !
+ call nc_check(nf90_inq_dimid(ncFileId, 'lon', LonDimId), 'nc_write_model_atts', 'finding dimid for lon')
+ call nc_check(nf90_inq_dimid(ncFileId, 'lonv', LonvDimId), 'nc_write_model_atts', 'finding dimid for lonv')
+ call nc_check(nf90_inq_dimid(ncFileId, 'lat', LatDimId), 'nc_write_model_atts', 'finding dimid for lat')
+ call nc_check(nf90_inq_dimid(ncFileId, 'latu', LatuDimId), 'nc_write_model_atts', 'finding dimid for latu')
+ call nc_check(nf90_inq_dimid(ncFileId, 'pfull', pfullDimId), 'nc_write_model_atts', 'finding dimid for pfull')
+
+ !
+ ! We'll write out surface pressure regardless of whether the model is using delp or ps internally
+ ! Then there are three more prognostic variables (u, v, t) and tracers
+ ! Coordinate order in the diagnostic file is [lev], lon, lat, copy, time
+ !
+ ! -------------------
+ !
+ ! Surface pressure - copy units from delp field, write long name, and ignore any other attributes
+ !
+ call nc_check(nf90_def_var(ncFileID, "PS", nf90_real, &
+ (/ lonDimId, latDimId, MemberDimID, TimeDimId /), diagVarId), &
+ 'nc_write_model_atts','def_var PS')
+ call nc_check(nf90_inq_varid(configFileId, "DELP", configVarId), &
+ 'nc_write_model_atts', 'getting varid from config file for DELP' )
+ call nc_check(nf90_copy_att(configFileId, configVarId, "units", ncFileId, diagVarId), &
+ 'nc_write_model_atts', 'copying units attribute for DELP')
+ call nc_check(nf90_put_att(ncFileId, diagVarId, "long_name", "Surface pressure"), &
+ 'nc_write_model_atts', 'writing long_name attribute for DELP' )
+ ! -------------------
+ !
+ ! The other prognostic variables (T, U, V)
+ !
+ call define_3d_real_var(ncFileId, configFileId, &
+ "U", (/ pfullDimId, lonDimId, latuDimId, MemberDimID, TimeDimId /), &
+ 'nc_write_model_atts')
+ call define_3d_real_var(ncFileId, configFileId, &
+ "V", (/ pfullDimId, lonvDimId, latDimId, MemberDimID, TimeDimId /), &
+ 'nc_write_model_atts')
+ call define_3d_real_var(ncFileId, configFileId, &
+ "T", (/ pfullDimId, lonDimId, latDimId, MemberDimID, TimeDimId /), &
+ 'nc_write_model_atts')
+
+ call nc_check(nf90_close(configFileId), &
+ 'nc_write_model_atts', 'closing '// trim(model_config_file))
+ ! -------------------
+ !
+ ! Tracers, which we assume are all defined on levels (not interfaces) and
+ ! on the regular (not staggered) grid
+ !
+ do tracer = 1, num_tracers
+ call nc_check(nf90_open(path = trim(tracer_config_files(tracer)), mode = nf90_nowrite, &
+ ncid = trcrFileId),'nc_write_model_atts', 'opening '// trim(tracer_config_files(tracer)))
+
+ call define_3d_real_var(ncFileId, trcrFileId,tracer_names(tracer), &
+ (/ pfullDimId, lonDimId, latDimId, MemberDimID, TimeDimId /), 'nc_write_model_atts')
+
+ call nc_check(nf90_close(trcrFileId),'nc_write_model_atts', &
+ 'closing' // trim(tracer_config_files(tracer)))
+ end do
+ ! -------------------
+
+ end if
+
+ call nc_check(nf90_enddef(ncfileID), 'nc_write_model_atts','enddef ')
+
+ !
+ ! Fill the coordinate variables
+ !
+ if ( output_state_vector ) then
+ ! Fill the state variable coordinate variable
+ call nc_check(nf90_put_var(ncFileID, StateVarVarID, (/ (i,i=1,model_size) /) ), &
+ 'nc_write_model_atts','put_var StateVar ')
+ else
+ call write_1D_values(ncFileId, "pfull", pfull, 'nc_write_model_atts')
+ call write_1D_values(ncFileId, "phalf", phalf, 'nc_write_model_atts')
+ call write_1D_values(ncFileId, "lat", lat, 'nc_write_model_atts')
+ call write_1D_values(ncFileId, "latu", latu, 'nc_write_model_atts')
+ call write_1D_values(ncFileId, "lon", lon, 'nc_write_model_atts')
+ call write_1D_values(ncFileId, "lonv", lonv, 'nc_write_model_atts')
+ end if
+
+ !
+ ! Flush the buffer and leave netCDF file open
+ !
+ call nc_check(nf90_sync(ncFileID),'nc_write_model_atts', 'sync ')
+ write (*, *) 'nc_write_model_atts: netCDF file ',ncFileID,' is synched ...'
+
+ ierr = 0
+
+ end function nc_write_model_atts
+
+ ! ----------------------------------------------------------------------------
+
+ integer function nc_write_model_vars(ncFileID, statevec, copyindex, timeindex ) result (ierr)
+ !
+ ! Writes the state vector to a diagnostics netcdf file.
+ !
+ integer, intent(in) :: ncFileID ! netCDF file identifier
+ real(r8), dimension(:), intent(in) :: statevec
+ integer, intent(in) :: copyindex, timeindex
+
+ integer :: varId, start, finish, i
+ !
+ ! We'll unpack the state vector into this temporary array before we write it
+ ! We rely on the number of lats and lons being the same on the regular and staggered grids
+ ! Dimension order is consistent with storage order in the state vector and
+ ! the order desired in the diagnostics file (it's not the same as the order in the restart file)
+ !
+ real, dimension(num_levels, num_lons, num_lats) :: tempField
+ logical, dimension(num_levels, num_lons, num_lats) :: allTrue
+ ! --------------------
+ allTrue(:, :, :) = .true.
+ ierr = -1 ! Assume the worst
+ if ( output_state_vector ) then
+ call nc_check(NF90_inq_varid(ncFileID, "state", varId), "nc_write_model_vars", "getting varid for state vector" )
+ call nc_check(NF90_put_var(ncFileID, varId, statevec, start=(/ 1, copyindex, timeindex /)), &
+ "nc_write_model_vars", "writing state vector")
+ else
+ !
+ ! The unpacking code is copied from vector_to_prog_var
+ !
+ ! 2D field - surface pressure
+ !
+ start = 1; finish = start + (num_lons * num_lats) - 1
+ tempField(1, :, :) = unpack(statevec(start:finish), allTrue(1, :, :), field = 0._r8)
+ call nc_check(nf90_inq_varid(ncFileID, "PS", varID), "nc_write_model_vars", "Getting varid for PS")
+ call nc_check(nf90_put_var(ncFileID, varID, tempField(1, :, :),start=(/ 1, 1, copyindex, &
+timeindex /)), "nc_write_model_vars", "Writing PS")
+
+ !
+ ! 3D fields - u, v, and T
+ ! These were stored in dim order level, lon, lat in the state vector and need
+ ! to be mapped back to lon, lat, level
+ !
+ do i = 1, num_3d_prog_vars
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats) - 1
+ tempField(:, :, :) = unpack(statevec(start:finish), allTrue, field = 0._r8)
+ call nc_check(nf90_inq_varid(ncFileID, names_3d_prog_vars(i), varID), "nc_write_model_vars", "Getting varid for " // names_3d_prog_vars(i))
+ call nc_check(nf90_put_var(ncFileID, varID, tempField,start=(/1,1,1,copyindex,timeindex/)), &
+"nc_write_model_vars", "Writing " // names_3d_prog_vars(i))
+ end do
+
+ !
+ ! Tracers
+ ! These were stored in dim order level, lon, lat, tracer_num in the state vector and need
+ ! to be mapped back to lon, lat, level, tracer_num
+ !
+ do i = 1, num_tracers
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats ) - 1
+ tempField(:, :, :) = unpack(statevec(start:finish), allTrue, field = 0._r8)
+ call nc_check(nf90_inq_varid(ncFileID, tracer_names(i), varID), "nc_write_model_vars", "Getting varid for " // tracer_names(i))
+ call nc_check(nf90_put_var(ncFileID, varID, tempField,start=(/1,1,1,copyindex,timeindex/)), &
+"nc_write_model_vars", "Writing " // tracer_names(i))
+ end do
+ end if
+ ierr = 0
+ end function nc_write_model_vars
+
+ ! ----------------------------------------------------------------------------
+ !
+ ! Stubs for unimplemented public procedures
+ !
+ ! ----------------------------------------------------------------------------
+
+ subroutine adv_1step(x, time)
+ real(r8), intent(inout) :: x(:)
+ type(time_type), intent(in) :: time
+ !
+ ! Does a single timestep advance of the model. The input value of
+ ! the vector x is the starting condition and x is updated to reflect
+ ! the changed state after a timestep. The time argument is intent
+ ! in and is used for models that need to know the date/time to
+ ! compute a timestep, for instance for radiation computations.
+ ! This interface is only called if the namelist parameter
+ ! async is set to 0 in perfect_model_obs of filter or if the
+ ! program integrate_model is to be used to advance the model
+ ! state as a separate executable. If one of these options
+ ! is not going to be used (the model will only be advanced as
+ ! a separate model-specific executable), this can be a
+ ! NULL INTERFACE.
+ !
+
+
+ end subroutine adv_1step
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine init_time(time)
+ type(time_type), intent(out) :: time
+ !
+ ! Companion interface to init_conditions. Returns a time that is somehow
+ ! appropriate for starting up a long integration of the model.
+ ! At present, this is only used if the namelist parameter
+ ! start_from_restart is set to .false. in the program perfect_model_obs.
+ !
+
+ ! for now, just set to 0
+ time = set_time(0, 0)
+
+ end subroutine init_time
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine init_conditions(x)
+ real(r8), intent(out) :: x(:)
+ !
+ ! Returns a model state vector, x, that is some sort of appropriate
+ ! initial condition for starting up a long integration of the model.
+ ! At present, this is only used if the namelist parameter
+ ! start_from_restart is set to .false. in the program perfect_model_obs.
+ !
+
+ end subroutine init_conditions
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine model_interpolate(x, location, itype, obs_val, istatus)
+ real(r8), intent(in) :: x(:)
+ type(location_type), intent(in) :: location
+ integer, intent(in) :: itype
+ real(r8), intent(out) :: obs_val
+ integer, intent(out) :: istatus
+ !
+ ! Given a state vector, a location, and a model state variable type,
+ ! interpolates the state variable field to that location and returns
+ ! the value in obs_val. The istatus variable should be returned as
+ ! 0 unless there is some problem in computing the interpolation in
+ ! which case an alternate value should be returned. The itype variable
+ ! is a model specific integer that specifies the type of field (for
+ ! instance temperature, zonal wind component, etc.).
+ !
+ ! As per CAM model_mod:
+ !
+ ! istatus meaning return expected obs? assimilate?
+ ! 0 obs and model are fine; yes yes
+ ! 1 fatal problem; no no
+ ! 2 exclude valid obs yes no
+ ! 3 unfamiliar obs type no no
+
+ integer :: variableStart, lat_index, lon_index, height_index
+ integer :: i, j, k
+ real(kind = r8) :: lon_weight, lat_weight
+ real(kind = r8), dimension(3) :: lon_lat_height
+ real(kind = r8), dimension(2, 2) :: cornerValues, psurf, weights ! Dimension order lon, lat
+ integer , dimension(2, 2) :: cornerStarts
+ real(kind = r8), dimension(:), &
+ allocatable :: theseLons, theseLats
+
+ !
+ ! Notes after talking to Jeff A
+ ! We interpolate the KINDS we have: U, V, T, tracers, and PS; we also allow for surface height
+ ! Vertical coordinates can be expressed as level, pressure (most usual), and height
+ ! Height is only used at present for GPS so I won't implement it yet
+ ! We'll interpolate linearly in lat, lon, and vertical coordinate since the
+ ! errors this introduces are small and can be lumped in to "representativeness"
+ ! When the vertical coordinate is pressure, the vertical indices bounding the
+ ! observation location might be different at the different corners - I think the
+ ! easiest way to get around that is to interpolate in the vertical at each of the four
+ ! corners first, then interpolate in the horizontal
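+ ! In other words, each of the four bracketing columns is first collapsed to a single
+ ! value at the observation's vertical location, and the answer is the bilinear
+ ! combination sum(weights(:, :) * cornerValues(:, :)) using the lon/lat weights below.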
+
+ ! -------------------------------------------------------------
+ !
+ ! Which variable are we looking for?
+ ! State vector is ordered ps (2D), u, v, t, tracers
+ !
+ variableStart = -1
+ if(itype == KIND_SURFACE_PRESSURE) then
+ variableStart = 1
+ else if (itype == KIND_U_WIND_COMPONENT) then
+ variableStart = 1 + (num_lons * num_lats)
+ else if (itype == KIND_V_WIND_COMPONENT) then
+ variableStart = 1 + (num_lons * num_lats) + 1 * (num_lons * num_lats * num_levels)
+ else if (itype == KIND_TEMPERATURE) then
+ variableStart = 1 + (num_lons * num_lats) + 2 * (num_lons * num_lats * num_levels)
+ else
+ do i = 1, num_tracers
+ if(itype == tracer_obs_kinds(i)) &
+ variableStart = 1 + (num_lons) * num_lats + (3 + i - 1) * (num_lons * num_lats * num_levels)
+ end do
+ end if
+
+ if(variableStart <= 0) then
+ !
+ ! We don't know how to interpolate this kind of observation
+ !
+ istatus = 3
+ obs_val = -huge(obs_val)
+ else
+ !
+ ! Horizontal interpolation
+ !
+ allocate(theseLons(num_lons), theseLats(num_lats))
+ !
+ ! U and V are on staggered grids
+ !
+ if(itype == KIND_V_WIND_COMPONENT) then
+ theseLons(:) = lonv(:)
+ else
+ theseLons(:) = lon(:)
+ end if
+ if(itype == KIND_U_WIND_COMPONENT) then
+ theseLats(:) = latu(:)
+ else
+ theseLats(:) = lat(:)
+ end if
+
+ lon_lat_height = get_location(location)
+ !
+ ! Choose lon_index such that (lon_index) <= lon; ditto for lat
+ ! Enforce periodicity to be sure that longitude conforms to the current convention (i.e. -180 to 180 or 0 to 360)
+ !
+ lon_lat_height(1) = makePeriodic(lon_lat_height(1), theseLons(1), theseLons(num_lons))
+ lon_index = findIndex(lon_lat_height(1), theseLons(:))
+ lon_weight = 1._r8 - (lon_lat_height(1) - theseLons(lon_index)) / (theseLons(lon_index + 1) - theseLons(lon_index))
+ !
+ ! There's no doubt a smart way to interpolate at the poles, but I'm going to punt
+ !
+ if(lon_lat_height(2) < theseLats(1) .or. lon_lat_height(2) >= theseLats(num_lats)) then
+ !
+ ! Latitude is out of bounds
+ !
+ lat_index = -1
+ istatus = 3
+ obs_val = -huge(obs_val)
+ else
+ lat_index = findIndex(lon_lat_height(2), theseLats(:))
+ lat_weight = 1._r8 - (lon_lat_height(2) - theselats(lat_index)) / (theselats(lat_index + 1) - theselats(lat_index))
+ end if
+ deallocate(theseLons, theseLats)
+
+ if(lat_index > 0) then
+ weights(1, 1) = lon_weight * lat_weight
+ weights(2, 1) = (1._r8 - lon_weight) * lat_weight
+ weights(1, 2) = lon_weight * (1._r8 - lat_weight)
+ weights(2, 2) = (1._r8 - lon_weight) * (1._r8 - lat_weight)
+
+ !
+ ! We don't always need psurf for the interpolation but it's cheap enough to compute
+ !
+ psurf(1, 1) = x(1 + (lat_index - 1) * num_lons + lon_index - 1)
+ psurf(2, 1) = x(1 + (lat_index - 1) * num_lons + lon_index )
+ psurf(1, 2) = x(1 + (lat_index ) * num_lons + lon_index - 1)
+ psurf(2, 2) = x(1 + (lat_index ) * num_lons + lon_index )
+
+ !
+ ! Find the four values that bracket the observation location in the horizontal
+ ! Storage order is level, lon, lat
+ !
+ if(itype == KIND_SURFACE_PRESSURE) then
+ cornerValues(:, :) = psurf(:, :)
+ else
+ !
+ ! Find the four starting locations of the 4 columns that bracket the observation location
+ ! and interpolate within each column
+ !
+ cornerStarts(1, 1) = (variableStart + (lat_index - 1) * (num_lons * num_levels) + (lon_index - 1) * num_levels)
+ cornerStarts(2, 1) = (variableStart + (lat_index - 1) * (num_lons * num_levels) + (lon_index ) * num_levels)
+ cornerStarts(1, 2) = (variableStart + (lat_index ) * (num_lons * num_levels) + (lon_index - 1) * num_levels)
+ cornerStarts(2, 2) = (variableStart + (lat_index ) * (num_lons * num_levels) + (lon_index ) * num_levels)
+
+ !
+ ! We know how to interpolate vertically in level and pressure - are we missing any possibilities?
+ !
+ if(vert_is_level(location)) then
+ forall(i = 1:2, j = 1:2)
+ cornerValues(i, j) = interpolate1D(desiredLocation = lon_lat_height(3), &
+ values = x(cornerStarts(i, j):cornerStarts(i, j) + num_levels - 1), &
+ locations = (/ (real(k, kind = r8), k = 1, num_levels) /) )
+ end forall
+ else if (vert_is_pressure(location)) then
+ !
+ ! Better be sure this is in the right units
+ !
+ ! Compute pressure at layer midpoints in this column on the fly
+ forall(i = 1:2, j = 1:2)
+ cornerValues(i, j) = interpolate1D(desiredLocation = lon_lat_height(3), &
+ values = x(cornerStarts(i, j):cornerStarts(i, j) + num_levels - 1), &
+ locations = akmid(:) + bkmid(:) * psurf(i, j) )
+ end forall
+ else
+ !
+ ! The requested vertical coordinate isn't a pressure or a level
+ !
+ cornerValues(:, :) = -huge(cornerValues)
+ istatus = 1
+ end if ! Interpolate in level or pressure
+ end if ! Surface pressure or 3D variable
+
+ if(any(cornerValues(:, :) <= -huge(cornerValues))) then
+ !
+ ! One or more of our horizontal values isn't valid
+ ! Likely the vertical location isn't within the range of pressures
+ !
+ obs_val = -huge(cornerValues)
+ istatus = 1
+ else
+ obs_val = sum(weights(:, :) * cornerValues(:, :))
+ !
+ ! Set istatus to ensure we want to assimilate this obs
+ !
+ if(vert_is_pressure(location) .and. &
+ (lon_lat_height(3) < highest_obs_pressure_mb * mb_to_pa .or. &
+ abs(lon_lat_height(2)) > max_obs_lat_degree) ) then
+ istatus = 2
+ else
+ istatus = 0
+ end if
+ end if
+ end if ! Check for valid latitude
+ end if ! Check for valid variable
+
+
+ end subroutine model_interpolate
+
+
+ ! ----------------------------------------------------------------------------
+
+ function get_model_time_step()
+ type(time_type) :: get_model_time_step
+ !
+ ! Returns the time step of the model; the smallest increment
+ ! in time that the model is capable of advancing the state in a given
+ ! implementation. This interface is required for all applications.
+ !
+
+
+ ! Time_step_atmos is global static storage
+ get_model_time_step = Time_step_atmos
+
+ end function get_model_time_step
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine end_model()
+ !------------------------------------------------------------------
+ !
+ ! Does any shutdown and clean-up needed for model. Can be a NULL
+ ! INTERFACE if the model has no need to clean up storage, etc.
+
+ ! good style ... perhaps you could deallocate stuff (from static_init_model?).
+ ! deallocate(state_loc)
+
+ end subroutine end_model
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine pert_model_state(state, pert_state, interf_provided)
+ real(r8), intent(in) :: state(:)
+ real(r8), intent(out) :: pert_state(:)
+ logical, intent(out) :: interf_provided
+ !------------------------------------------------------------------
+ !
+ ! Perturbs a model state for generating initial ensembles.
+ ! The perturbed state is returned in pert_state.
+ ! A model may choose to provide a NULL INTERFACE by returning
+ ! .false. for the interf_provided argument. This indicates to
+ ! the filter that if it needs to generate perturbed states, it
+ ! may do so by adding an O(0.1) magnitude perturbation to each
+ ! model state variable independently. The interf_provided argument
+ ! should be returned as .true. if the model wants to do its own
+ ! perturbing of states.
+
+
+ interf_provided = .false.
+
+ end subroutine pert_model_state
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine ens_mean_for_model(ens_mean)
+ real(r8), intent(in) :: ens_mean(:)
+
+ end subroutine ens_mean_for_model
+
+ ! ----------------------------------------------------------------------------
+ !
+ ! Public procedures that aren't part of the standard DART interface
+ !
+ ! ----------------------------------------------------------------------------
+
+ subroutine init_model_instance(var)
+ type(model_type), intent(out) :: var
+ !
+ ! Initializes an instance of a model state variable
+ ! In our case this means storage allocation
+ !
+
+ call end_model_instance(var)
+ allocate(var%u (num_lons, num_lats, num_levels), &
+ var%v (num_lons, num_lats, num_levels), &
+ var%T (num_lons, num_lats, num_levels), &
+ var%delp(num_lons, num_lats, num_levels))
+ if(num_tracers > 0) &
+ allocate(var%tracers(num_lons, num_lats, num_levels, num_tracers), &
+ var%tracerTypes( num_tracers))
+
+ end subroutine init_model_instance
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine end_model_instance(var)
+ type(model_type), intent(inout) :: var
+ !
+ ! Ends an instance of a model state variable
+ ! i.e. frees allocated storage
+ !
+
+ if(associated(var%ps )) deallocate(var%ps)
+ if(associated(var%u )) deallocate(var%u)
+ if(associated(var%v )) deallocate(var%v)
+ if(associated(var%T )) deallocate(var%T)
+ if(associated(var%delp)) deallocate(var%delp)
+ if(associated(var%tracers)) deallocate(var%tracers)
+ if(associated(var%tracerTypes)) deallocate(var%tracerTypes)
+
+ end subroutine end_model_instance
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine read_model_init(rst_file_name, var)
+ character(len = *), intent(in) :: rst_file_name
+ type(model_type), intent(inout) :: var
+
+ integer :: ncfileid, delpvarid, uvarid, vvarid, tvarid
+ integer :: ncfileid_t, varID, i
+
+ ! Do restart file first
+ call nc_check(nf90_open(path = trim(rst_file_name), mode = nf90_nowrite, ncid = ncfileid), &
+ 'read_model_init', 'opening '// trim(rst_file_name))
+
+ call nc_check(nf90_inq_varid(ncfileid,"DELP",delpvarid),'read_model_init','inquiring delp varid')
+ call nc_check(nf90_inq_varid(ncfileid,"U",uvarid),'read_model_init','inquiring u varid')
+ call nc_check(nf90_inq_varid(ncfileid,"V",vvarid),'read_model_init','inquiring v varid')
+ call nc_check(nf90_inq_varid(ncfileid,"T",tvarid),'read_model_init','inquiring T varid')
+
+ call nc_check(nf90_get_var(ncfileid,delpvarid,var%delp),'read_model_init','getting delp var')
+ call nc_check(nf90_get_var(ncfileid,uvarid,var%u),'read_model_init','getting u var')
+ call nc_check(nf90_get_var(ncfileid,vvarid,var%v),'read_model_init','getting v var')
+ call nc_check(nf90_get_var(ncfileid,tvarid,var%T),'read_model_init','getting T var')
+
+ ! Do tracer file
+ call nc_check(nf90_open(path = trim(tracer_files(1)), mode = nf90_nowrite, ncid = ncfileid_t), &
+ 'read_model_init', 'opening '// trim(tracer_files(1)))
+
+ do i = 1, num_tracers
+ call nc_check(nf90_inq_varid(ncfileid_t, tracer_names(i), varID), "read_model_init", &
+ "inquiring varid for " // tracer_names(i))
+ call nc_check(nf90_get_var(ncfileid_t, varID, var%tracers(:,:,:,i)), "read_model_init", &
+ "getting " // tracer_names(i))
+ end do
+
+ !Close files
+ call nc_check(nf90_close(ncfileid),'read_model_init','closing restart file')
+ call nc_check(nf90_close(ncfileid_t),'read_model_init','closing tracer file')
+
+ end subroutine read_model_init
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine write_model_init(file_name, var)
+ character(len = *), intent(in) :: file_name
+ type(model_type), intent(in) :: var
+
+ integer :: ncfileid, delpvarid, uvarid, vvarid, tvarid
+ integer :: ncfileid_t, varID, i
+
+ call nc_check(nf90_open(path = trim(file_name), mode = nf90_write, ncid = ncfileid), &
+ 'write_model_init', 'opening ' // trim(file_name))
+
+ call nc_check(nf90_inq_varid(ncfileid,"DELP",delpvarid),'write_model_init','inquiring delp varid')
+ call nc_check(nf90_inq_varid(ncfileid,"U",uvarid),'write_model_init','inquiring u varid')
+ call nc_check(nf90_inq_varid(ncfileid,"V",vvarid),'write_model_init','inquiring v varid')
+ call nc_check(nf90_inq_varid(ncfileid,"T",tvarid),'write_model_init','inquiring T varid')
+
+ call nc_check(nf90_put_var(ncfileid,delpvarid,var%delp),'write_model_init','putting delp var')
+ call nc_check(nf90_put_var(ncfileid,uvarid,var%u),'write_model_init','putting u var')
+ call nc_check(nf90_put_var(ncfileid,vvarid,var%v),'write_model_init','putting v var')
+ call nc_check(nf90_put_var(ncfileid,tvarid,var%T),'write_model_init','putting T var')
+
+ ! Do tracer file
+ call nc_check(nf90_open(path = trim(tracer_files(1)), mode = nf90_write, ncid = ncfileid_t), &
+ 'write_model_init', 'opening '// trim(tracer_files(1)))
+
+ do i = 1, num_tracers
+ call nc_check(nf90_inq_varid(ncfileid_t, tracer_names(i), varID), "write_model_init", &
+ "inquiring varid for " // tracer_names(i))
+ call nc_check(nf90_put_var(ncfileid_t, varID, var%tracers(:,:,:,i)), "write_model_init", &
+ "putting " // tracer_names(i))
+ end do
+
+ !Close files
+ call nc_check(nf90_close(ncfileid),'write_model_init','closing restart file')
+ call nc_check(nf90_close(ncfileid_t),'write_model_init','closing tracer file')
+
+ end subroutine write_model_init
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine prog_var_to_vector(model_var, state_vector)
+ type(model_type), intent(in ) :: model_var
+ real(kind = r8), dimension(:), intent(out) :: state_vector
+
+ ! -----------------------------------------------
+ real, dimension(:, :), allocatable :: psurf
+ !
+ ! See note on reordering below. This could be generalized
+ !
+ integer, dimension(3) :: shape, order = (/ 3, 1, 2 /)
+ ! -----------------------------------------------
+
+ real, dimension(:,:,:), allocatable :: uvar, vvar, tvar
+ real, dimension(:,:,:,:), allocatable :: trcs
+ integer :: i,j,k
+
+ shape = (/ num_levels, num_lons, num_lats /)
+
+ if(size(state_vector) /= model_size) &
+ call error_handler(E_ERR, 'prog_var_to_vector', "State vector is incorrect size for model_type", source, revision, revdate)
+
+ allocate(psurf(num_lons, num_lats))
+
+ if (associated(model_var%ps)) then
+ psurf(:, :) = model_var%ps
+ else if (associated(model_var%delp)) then
+ psurf(:, :) = sum(model_var%delp, dim = 3) + model_top_pressure
+ else
+ call error_handler(E_ERR, 'prog_var_to_vector', "Neither delp nor ps is present in model_var", source, revision, revdate)
+ end if
+
+ allocate(uvar(num_levels,num_lons,num_lats),vvar(num_levels,num_lons,num_lats), &
+ tvar(num_levels,num_lons,num_lats))
+ allocate(trcs(num_levels,num_lons,num_lats,num_tracers))
+
+ forall(i = 1:num_levels, j=1:num_lons,k=1:num_lats)
+ uvar(i,j,k) = model_var%u(j,k,i)
+ vvar(i,j,k) = model_var%v(j,k,i)
+ tvar(i,j,k) = model_var%t(j,k,i)
+ trcs(i,j,k,1:num_tracers) = model_var%tracers(j,k,i,1:num_tracers)
+ end forall
+ !
+ ! 2D field ps is ordered lon, lat; 3D fields are ordered lon, lat, level but are
+ ! reordered to level, lon, lat before packing into vectors
+ ! Tracers are ordered lon, lat, level, tracer_num, pack as level, lon, lat, tracer_num.
+ if(num_tracers > 0) then
+ state_vector(:) = (/ pack(psurf, .true.), &
+ pack(uvar, .true.), &
+ pack(vvar, .true.), &
+ pack(tvar, .true.), &
+ pack(trcs, .true.) /)
+ else
+ state_vector(:) = (/ pack(psurf, .true.), &
+ pack(uvar, .true.), &
+ pack(vvar, .true.), &
+ pack(tvar, .true.) /)
+ end if
+ deallocate(psurf,uvar,vvar,tvar,trcs)
+ end subroutine prog_var_to_vector
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine vector_to_prog_var(state_vector, model_var)
+ real(kind = r8), dimension(:), intent(in ) :: state_vector
+ type(model_type), intent(inout) :: model_var
+
+ ! -----------------------------------------------
+ !
+ ! Local variables
+ !
+ real, dimension(:, :), allocatable :: psurf
+ logical, dimension(:, :, :), allocatable :: allTrue
+ integer :: start, finish, k
+ !
+ ! See note on reordering below. This could be generalized
+ !
+ integer, dimension(3) :: shape, order = (/ 2, 3, 1 /)
+
+ real, dimension(:,:,:), allocatable :: uvar, vvar, tvar
+ real, dimension(:,:,:,:), allocatable :: trcs
+ integer :: i, j, m
+
+ ! -----------------------------------------------
+
+ if(size(state_vector) /= model_size) &
+ call error_handler(E_ERR, 'vector_to_prog_var', "State vector is incorrect size for model_type", source, revision, revdate)
+
+ allocate(allTrue(num_levels, num_lons, num_lats))
+ allTrue(:, :, :) = .true.
+ shape = (/ num_lons, num_lats, num_levels /)
+
+ allocate(psurf(num_lons, num_lats))
+ !
+ ! 2D field - surface pressure (may be mapped to delp)
+ !
+ start = 1; finish = start + (num_lons * num_lats) - 1
+ psurf = unpack(state_vector(start:finish), allTrue(1, :, :), field = 0._r8)
+ if (associated(model_var%ps)) then
+ model_var%ps = psurf(:, :)
+ else if (associated(model_var%delp)) then
+ !
+ ! Compute delp from fixed ak and bk terms and surface pressure
+ !
+ do k = 1, num_levels
+ model_var%delp(:, :, k) = (ak(k+1) - ak(k)) + (bk(k+1) - bk(k)) * psurf(:, :)
+ end do
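+ ! (note: summing these delp layers recovers the column pressure thickness, so this
+ ! is the inverse of the ps = sum(delp) + model_top_pressure computation in
+ ! prog_var_to_vector, up to the ak/bk convention at the model top)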
+ else
+ call error_handler(E_ERR, 'vector_to_prog_var', "Neither delp nor ps is present in model_var", source, revision, revdate)
+ end if
+
+ !
+ ! 3D fields - u, v, and T
+ ! These were stored in dim order level, lon, lat in the state vector and need
+ ! to be mapped back to lon, lat, level
+ !
+
+ allocate(uvar(num_levels,num_lons,num_lats),vvar(num_levels,num_lons,num_lats),&
+ tvar(num_levels,num_lons,num_lats))
+
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats) - 1
+ uvar = unpack(state_vector(start:finish), allTrue, field = 0._r8)
+
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats) - 1
+ vvar = unpack(state_vector(start:finish), allTrue, field = 0._r8)
+
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats) - 1
+ tvar = unpack(state_vector(start:finish), allTrue, field = 0._r8)
+
+ !Fill model_var components
+ forall(i = 1:num_lons, j=1:num_lats,m=1:num_levels)
+ model_var%u(i,j,m) = uvar(m,i,j)
+ model_var%v(i,j,m) = vvar(m,i,j)
+ model_var%t(i,j,m) = tvar(m,i,j)
+ end forall
+
+ !
+ ! Tracers
+ ! These were stored in dim order level, lon, lat, tracer_num in the state vector and need
+ ! to be mapped back to lon, lat, level, tracer_num
+ !
+ allocate(trcs(num_levels,num_lons,num_lats,num_tracers))
+ if(num_tracers > 0) then
+ start = finish + 1; finish = start + (num_levels * num_lons * num_lats * num_tracers) - 1
+ if(finish /= model_size) &
+ call error_handler(E_ERR, 'vector_to_prog_var', "Mismatch between model size and state vector", source, revision, revdate)
+ trcs = unpack(state_vector(start:finish), &
+ spread(allTrue, dim = 4, nCopies = num_tracers),field = 0._r8)
+
+ !Fill model_var%tracers
+ forall(i = 1:num_lons, j=1:num_lats,m=1:num_levels)
+ model_var%tracers(i,j,m,1:num_tracers) = trcs(m,i,j,1:num_tracers)
+ end forall
+
+ end if
+
+ deallocate(allTrue)
+ deallocate(psurf,uvar,vvar,tvar,trcs)
+ end subroutine vector_to_prog_var
+
+ ! ----------------------------------------------------------------------------
+ !
+ ! Private procedures
+ !
+ ! ----------------------------------------------------------------------------
+
+ subroutine read_dimension_info(ncFileId)
+ integer, intent(in) :: ncFileId
+ !
+ ! Called by static_init_model; fills in global variables related to the coordinates
+ !
+ integer :: i, temp_size
+ integer, dimension(num_dims) :: dim_ids
+
+ do i = 1, num_dims
+ call nc_check(nf90_inq_dimid(ncfileid, trim(dim_names(i)), dim_ids(i)), &
+ 'static_init_model', 'looking for dimension id for '// trim(dim_names(i)))
+ call nc_check(nf90_Inquire_Dimension(ncfileid, dim_ids(i), len = dim_lens(i)), &
+ 'static_init_model', 'looking for size of '// trim(dim_names(i)))
+ call nc_check(nf90_inq_varid(ncfileid, trim(dim_names(i)), dim_var_ids(i)), &
+ 'static_init_model', 'looking for variable id for '// trim(dim_names(i)))
+ end do
+
+ do i = 1, size(dim_names)
+ if(all(trim(dim_names(i)) /= (/ 'lat  ', 'latu ', 'lon  ', 'lonv ', 'pfull', 'phalf' /)) ) &
+ call error_handler(E_ERR, 'read_dimension_info', "Mapping between dim names and variables is messed up", source, revision, revdate)
+ end do
+ if(dim_lens(1) /= dim_lens(2) .or. dim_lens(3) /= dim_lens(4) .or. dim_lens(5) /= dim_lens(6) - 1) &
+ call error_handler(E_ERR, 'read_dimension_info', "Dimension sizes aren't what we expect", source, revision, revdate)
+
+
+ num_lats = dim_lens(1)
+ allocate(lat(num_lats), latu(num_lats))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(1), lat), &
+ 'static_init_model', 'reading values of '// trim(dim_names(1)))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(2), latu), &
+ 'static_init_model', 'reading values of '// trim(dim_names(2)))
+
+ num_lons = dim_lens(3)
+ allocate(lon(num_lons), lonv(num_lons))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(3), lon), &
+ 'static_init_model', 'reading values of '// trim(dim_names(3)))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(4), lonv), &
+ 'static_init_model', 'reading values of '// trim(dim_names(4)))
+
+ num_levels = dim_lens(5)
+ allocate(pfull(num_levels), phalf(num_levels + 1))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(5), pfull), &
+ 'static_init_model', 'reading values of '// trim(dim_names(5)))
+ call nc_check(nf90_get_var(ncfileid, dim_var_ids(6), phalf), &
+ 'static_init_model', 'reading values of '// trim(dim_names(6)))
+
+ end subroutine read_dimension_info
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine copy_dim_var_pair(newFileId, dimName, dimLength, oldFileId, routineName)
+ integer, intent(in) :: newFileId, dimLength, oldFileId
+ character(len = *), intent(in) :: dimName, routineName
+ !
+ ! Create a dimension and corresponding 1d double variable in newFileId
+ ! using the definitions in oldFileId
+ !
+
+ integer :: att, num_atts, newDimId, oldVarId, newVarId
+ character(len = NF90_MAX_NAME) :: attName
+
+ call nc_check(nf90_def_dim(newFileId, trim(dimName), dimLength, newDimId), &
+ trim(routineName), 'def_dim ' // trim(dimName) )
+ call nc_check(nf90_def_var(newFileId, trim(dimName), nf90_double, newDimId, newVarId), &
+ trim(routineName), 'def_var ' // trim(dimName) )
+ !
+  ! Copy over all the attributes from the old file to the new file
+ !
+ call nc_check(nf90_inq_varid(oldFileId, trim(dimName), oldVarId), &
+ trim(routineName), 'getting varid from old file for ' // trim(dimName) )
+ call nc_check(nf90_inquire_variable(oldFileId, oldVarId, nAtts = num_atts), &
+ trim(routineName), 'reading number of attributes from ' // trim(dimName) )
+ do att = 1, num_atts
+    call nc_check(nf90_inq_attname(oldFileId, oldVarId, att, attName), &
+                  trim(routineName), 'reading attribute name from ' // trim(dimName) )
+ call nc_check(nf90_copy_att(oldFileId, oldVarId, trim(attName), newFileId, newVarId), &
+ trim(routineName), 'copying attribute ' // trim(attName) // ' for variable ' // trim(dimName) )
+ end do
+ end subroutine copy_dim_var_pair
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine define_3d_real_var(newFileId, oldFileId, varName, dimIds, routineName)
+ integer, intent(in) :: newFileId, oldFileId
+ character(len = *), intent(in) :: varName, routineName
+ integer, dimension(:), intent(in) :: dimIds
+ !
+ ! Define a new variable in newFileId, then copy all the attributes from the same variable
+ ! in oldFileId
+ !
+
+ integer :: att, num_atts, oldVarId, newVarId
+ character(len = NF90_MAX_NAME) :: attName
+
+ call nc_check(nf90_def_var(newFileId, trim(varName), nf90_real, dimIds, newVarId), &
+ trim(routineName),'def_var ' // trim(varName))
+
+ call nc_check(nf90_inq_varid(oldFileId, trim(varName), oldVarId), &
+ trim(routineName), 'getting varid from old file for ' // trim(varName) )
+ call nc_check(nf90_inquire_variable(oldFileId, oldVarId, nAtts = num_atts), &
+ trim(routineName), 'reading number of attributes from ' // trim(varName) )
+ do att = 1, num_atts
+ call nc_check(nf90_inq_attname(oldFileId, oldVarId, att, attName), &
+ trim(routineName), 'reading attributes from ' // trim(varName) )
+ call nc_check(nf90_copy_att(oldFileId, oldVarId, trim(attName), newFileId, newVarId), &
+ trim(routineName), 'copying attribute ' // trim(attName) // ' for variable ' // trim(varName) )
+ end do
+
+ end subroutine define_3d_real_var
+
+ ! ----------------------------------------------------------------------------
+
+ subroutine write_1D_values(ncFileId, varName, values, routineName)
+ integer, intent(in) :: ncFileId
+ character(len = *), intent(in) :: varName, routineName
+ real, dimension(:), intent(in) :: values
+
+ integer :: varId
+
+ call nc_check(nf90_inq_varid(ncFileId, trim(varName), varId), trim(routineName), 'getting varid for ' // trim(varName) )
+ call nc_check(nf90_put_var(ncFileId, varId, values), trim(routineName), 'writing ' // trim(varName) )
+
+ end subroutine write_1D_values
+
+ ! ----------------------------------------------------------------------------
+
+ elemental function makePeriodic(a, aMin, aMax)
+ real(kind=r8), intent(in) :: a, aMin, aMax
+ real(kind=r8) :: makePeriodic
+ !
+ ! Ensure that a position is within domain when the boundary conditions are periodic
+ !
+ ! makePeriodic = aMin + mod(a - aMin, aMax - aMin)
+ ! if(makePeriodic < aMin) makePeriodic = aMax - abs(makePeriodic - aMin)
+ makePeriodic = a
+ do
+ if(makePeriodic <= aMax .and. makePeriodic > aMin) exit
+ if(makePeriodic > aMax) then
+ makePeriodic = makePeriodic - (aMax - aMin)
+ else if (makePeriodic == aMin) then
+ makePeriodic = aMax
+ else
+ makePeriodic = makePeriodic + (aMax - aMin)
+ end if
+ end do
+ end function makePeriodic
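makePeriodic wraps a coordinate into the half-open interval (aMin, aMax]: values above aMax or at or below aMin are shifted by whole multiples of (aMax - aMin), and a value exactly at aMin maps to aMax. A small standalone copy of that logic, applied to longitudes in degrees:

program wrap_sketch
   implicit none
   integer, parameter :: r8 = selected_real_kind(12)

   print *, wrap(365.0_r8, 0.0_r8, 360.0_r8)   ! ->   5.0
   print *, wrap(-10.0_r8, 0.0_r8, 360.0_r8)   ! -> 350.0
   print *, wrap(  0.0_r8, 0.0_r8, 360.0_r8)   ! -> 360.0 (aMin maps to aMax)

contains

   function wrap(a, aMin, aMax) result(w)
      real(r8), intent(in) :: a, aMin, aMax
      real(r8)             :: w
      w = a
      do
         if (w <= aMax .and. w > aMin) exit
         if (w > aMax) then
            w = w - (aMax - aMin)
         else if (w == aMin) then
            w = aMax
         else
            w = w + (aMax - aMin)
         end if
      end do
   end function wrap

end program wrap_sketch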
+ !------------------------------------------------------------------------------------------
+ pure function interpolate1D(desiredLocation, values, locations)
+ real(kind = r8), intent(in) :: desiredLocation
+ real(kind = r8), dimension(:), intent(in) :: values, locations
+ real(kind = r8) :: interpolate1D
+ !
+ ! Given a set of values at a set of locations (ordered from lowest to highest coordinate value)
+  ! return the value at an arbitrary point using linear interpolation
+ !
+ real(kind =r8) :: fraction
+ integer :: n, vIndex ! Such that locations(vIndex) <= desiredLocation < location(vIndex+1)
+ ! ----------------------
+
+ n = size(values) ! = size(locations)
+ if(desiredLocation < locations(1) .or. desiredLocation > locations(n)) then
+ !
+ ! Desired location is out of bounds
+ !
+ interpolate1D = -huge(interpolate1D)
+ else if (abs(desiredLocation - locations(n)) <= spacing(desiredLocation)) then
+ !
+ ! Desired location is exactly at the last location
+ !
+ interpolate1D = values(n)
+ else
+ vIndex = findIndex(desiredLocation, locations)
+ fraction = 1._r8 - (desiredLocation - locations(vIndex)) / &
+ (locations(vIndex + 1) - locations(vIndex))
+ interpolate1D = fraction * values(vIndex) + (1._r8 - fraction) * values(vIndex + 1)
+ end if
+ end function interpolate1D
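The weights in interpolate1D are plain linear interpolation: with locations(vIndex) <= x < locations(vIndex+1), the left-hand value gets weight fraction = 1 - (x - locations(vIndex)) / (locations(vIndex+1) - locations(vIndex)) and the right-hand value gets 1 - fraction. A quick worked sketch of that formula with toy numbers:

program interp_sketch
   implicit none
   integer, parameter :: r8 = selected_real_kind(12)
   real(r8) :: locations(4), values(4), x, fraction, y
   integer  :: vIndex

   locations = (/ 0.0_r8, 10.0_r8, 20.0_r8, 30.0_r8 /)
   values    = (/ 1.0_r8,  2.0_r8,  4.0_r8,  8.0_r8 /)
   x         = 15.0_r8
   vIndex    = 2           ! locations(2) = 10 <= x < 20 = locations(3)

   fraction = 1.0_r8 - (x - locations(vIndex)) / (locations(vIndex+1) - locations(vIndex))
   y        = fraction * values(vIndex) + (1.0_r8 - fraction) * values(vIndex+1)
   print *, 'fraction = ', fraction   ! 0.5
   print *, 'value    = ', y          ! 3.0, halfway between 2.0 and 4.0
end program interp_sketch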
+ !------------------------------------------------------------------------------------------
+
+ pure function findIndex(value, table, firstGuess)
+ real(kind=r8), intent( in) :: value
+ real(kind=r8), dimension(:), intent( in) :: table
+ integer, optional, intent( in) :: firstGuess
+ integer :: findIndex
+ !
+  ! Find the index i into the table such that table(i) <= value < table(i+1)
+ ! This is modeled after routine "hunt" from Numerical Recipes, 2nd ed.,
+ ! pg 112. Here we know that the values in the table are always increasing,
+ ! that every value should be spanned by the table entries, and the firstGuess
+ ! always makes sense.
+
+ ! Local variables
+ integer :: lowerBound, upperBound, midPoint
+ integer :: increment
+
+ ! Hunting; only done if a first guess is supplied
+ ! Move upper and lower bounds around until the value is spanned by
+ ! table(lowerBound) and table(upperBound). Make the interval twice as
+ ! big at each step
+ if(present(firstGuess)) then
+ lowerBound = firstGuess
+ increment = 1
+ huntingLoop: do
+ upperBound = min(lowerBound + increment, size(table))
+ if(lowerBound == size(table) .or. &
+ (table(lowerBound) <= value .and. table(upperBound) > value)) exit huntingLoop
+ if(table(lowerBound) > value) then
+ upperBound = lowerBound
+ lowerBound = max(upperBound - increment, 1)
+ else
+ ! Both table(lowerBound) and table(upperBound) are <= value
+ lowerBound = upperBound
+ end if
+ increment = increment * 2
+ end do huntingLoop
+ else
+ lowerBound = 0; upperBound = size(table)
+ end if
+
+ ! Bisection: figure out which half of the remaining interval holds the
+ ! desired value, discard the other half, and repeat
+ bisectionLoop: do
+ if(lowerBound == size(table) .or. upperBound <= lowerBound + 1) exit bisectionLoop
+ midPoint = (lowerBound + upperBound)/2
+ if(value >= table(midPoint)) then
+ lowerBound = midPoint
+ else
+ upperBound = midPoint
+ end if
+ end do bisectionLoop
+
+ findIndex = lowerBound
+ end function findIndex
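findIndex returns the bracketing index i with table(i) <= value < table(i+1); the optional first guess lets it "hunt" outward with a doubling increment before bisecting, which is cheap when successive calls look up nearby values. A stripped-down sketch of just the bisection phase, assuming a strictly increasing table that spans the value:

program bisect_sketch
   implicit none
   integer, parameter :: r8 = selected_real_kind(12)
   real(r8) :: table(6)

   table = (/ 1.0_r8, 2.0_r8, 4.0_r8, 8.0_r8, 16.0_r8, 32.0_r8 /)
   print *, bracket( 5.0_r8, table)   ! -> 3, since table(3) = 4 <=  5 <  8 = table(4)
   print *, bracket(16.0_r8, table)   ! -> 5, since table(5) = 16 <= 16 < 32 = table(6)

contains

   function bracket(value, table) result(lowerBound)
      real(r8), intent(in) :: value, table(:)
      integer :: lowerBound, upperBound, midPoint
      lowerBound = 0; upperBound = size(table)
      do
         if (upperBound <= lowerBound + 1) exit
         midPoint = (lowerBound + upperBound) / 2
         if (value >= table(midPoint)) then
            lowerBound = midPoint      ! keep the upper half of the interval
         else
            upperBound = midPoint      ! keep the lower half of the interval
         end if
      end do
   end function bracket

end program bisect_sketch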
+
+ !------------------------------------------------------------------------------------------
+
+end module model_mod
Property changes on: DART/trunk/models/am2/model_mod.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/shell_scripts/advance_model.csh
===================================================================
--- DART/trunk/models/am2/shell_scripts/advance_model.csh (rev 0)
+++ DART/trunk/models/am2/shell_scripts/advance_model.csh 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,234 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2006, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL$
+# $Id: advance_model.csh 3116 2007-10-15 17:56:07Z nancy $
+# $Revision$
+# $Date: 2007-10-15 10:56:07 -0700 (Mon, 15 Oct 2007) $
+
+#----------------------------------------------------------------------
+# advance_model.csh
+#
+# Script to advance one ensemble member one filter "time step"
+# when the model advance is executed as a separate process.
+# Called by the filter executable.
+# Calls run-cam.csh, the CAM execution script.
+# Calls 3 translation routines to translate time and model state.
+# Runs on one of the compute nodes allotted to the filter executable
+#
+# Arguments are the process number of caller, the number of state copies
+# belonging to that process, and the name of the filter_control_file for
+# that process
+
+# arg#1 is the process number of the caller
+# arg#2 is the number of state copies belonging to that process
+# arg#3 is the name of the control file that determines which members will
+# be advanced on which processors.
+#----------------------------------------------------------------------
+
+set process = $1
+set num_states = $2
+set control_file = $3
+echo "advance_model.csh args = $1 $2 $3"
+
+set retry_max = 2
+
+# Get unique name for temporary working directory for this process's stuff
+set temp_dir = 'advance_temp'${process}
+
+# args to previous version of this script
+set myname = $0
+set CENTRALDIR = `pwd`
+
+# Create a clean temporary directory and go there
+\rm -rf $temp_dir
+mkdir -p $temp_dir
+cd $temp_dir
+
+# People have the craziest aliases. These prevent the obsessive-compulsive
+# from causing themselves no end of angst.
+if ( ! $?REMOVE ) then
+ set REMOVE = 'rm -rf'
+endif
+if ( ! $?COPY ) then
+ set COPY = 'cp -fp'
+endif
+if ( ! $?MOVE ) then
+ set MOVE = 'mv -f'
+endif
+if ( ! $?LINK ) then
+ set LINK = 'ln -fs'
+endif
+
+echo "CENTRALDIR is ${CENTRALDIR}" > cam_out_temp
+echo "temp_dir is $temp_dir" >> cam_out_temp
+
+# Get information about this experiment from file "casemodel",
+# created by the main controlling script (job.csh)
+
+# set $case = the case and
+# $model = the directory name (in the CAM source tree)
+# where CAM executable will be found.
+# set locations of the CAM and CLM input files
+set case = `head -1 ${CENTRALDIR}/casemodel | tail -1`
+set model = `head -2 ${CENTRALDIR}/casemodel | tail -1`
+set cam_init = `head -3 ${CENTRALDIR}/casemodel | tail -1`
+set clm_init = `head -4 ${CENTRALDIR}/casemodel | tail -1`
+
+# output diagnostic information to the same file as the CAM list-directed output
+echo "case $case model $model" >> cam_out_temp
+echo "cam init is $cam_init" >> cam_out_temp
+echo "clm init is $clm_init" >> cam_out_temp
+
+# Loop through each ensemble this task is responsible for advancing.
+set ensemble_number_line = 1
+set input_file_line = 2
+set output_file_line = 3
+set state_copy = 1
+while($state_copy <= $num_states)
+
+   # loop through the control file, extracting lines in groups of 3
+   # (a sample control file layout is sketched just after this loop).
+ set ensemble_number = `head -$ensemble_number_line ../$control_file | tail -1`
+ set input_file = `head -$input_file_line ../$control_file | tail -1`
+ set output_file = `head -$output_file_line ../$control_file | tail -1`
+
+ # the previous script used element instead of ensemble_number. make them
+ # the same for now.
+ set element = $ensemble_number
+ touch cam_out_temp
+ echo "starting ${myname} for ens member $element at "`date` >> cam_out_temp
+
+ # get model state initial conditions for this ensemble member
+ ${LINK} ${CENTRALDIR}/$input_file temp_ic
+
+ # get filter namelists for use by cam
+ ${COPY} ${CENTRALDIR}/input.nml input.nml
+
+ # this just creates a file that helps you figure out which member is
+ # being advanced in this directory. FYI only, you don't need it.
+ echo $element >! element
+ cp element element$element
+
+ echo "ls $temp_dir for element $element" >> cam_out_temp
+ ls -lRt >> cam_out_temp
+
+ # Need a base CAM initial file into which to copy state vector from filter.
+ # c[al]minput_$element also carry along CAM/CLM fields which are not updated
+ # by the filter (not part of the filter model state).
+ # First look for c[al]minput.nc resulting from the previous advance of this ensemble
+ # member from within the same day/obs_seq.out time span (in CENTRALDIR)
+ # Failing that, look for the results of the last advance of this ensemble member
+ # of the previous obs_seq.out (i.e. in CENTRALDIR/exp_name/day/CAM)
+ # Failing that (when starting an experiment which has no spun up set of members)
+ # get a copy of a single CAM initial file (usually from somewhere independent
+ # of this experiment, i.e. /scratch/.../New_state/T42_GWD/CAM/caminput_0.nc)
+
+ if (-e ${CENTRALDIR}/caminput_${element}.nc) then
+ ${COPY} ${CENTRALDIR}/caminput_${element}.nc caminput.nc
+ echo "CENTRALDIR caminput comes from ${CENTRALDIR}/caminput_${element}.nc" >> cam_out_temp
+ else if (-e ${cam_init}${element}.nc) then
+ ${COPY} ${cam_init}${element}.nc caminput.nc
+ echo "cam_init caminput comes from ${cam_init}${element}.nc" >> cam_out_temp
+ else
+ ${COPY} ${cam_init}0.nc caminput.nc
+ echo "DEFAULT caminput comes from ${cam_init}0.nc" >> cam_out_temp
+ endif
+
+ if ( -e ${CENTRALDIR}/clminput_${element}.nc) then
+ ${COPY} ${CENTRALDIR}/clminput_${element}.nc clminput.nc
+ else if (-e ${clm_init}${element}.nc) then
+ ${COPY} ${clm_init}${element}.nc clminput.nc
+ else
+ ${COPY} ${clm_init}0.nc clminput.nc
+ endif
+
+ ${LINK} ${CENTRALDIR}/topog_file.nc .
+
+ # create 'times' file for CAM from DART times in assim_model_state_ic#
+ # This info is passed to CAM through the creation of its namelist
+ if (-e temp_ic && -e ${CENTRALDIR}/trans_time) then
+ echo 'advance_model; executing trans_time '`date` >> cam_out_temp
+ ${CENTRALDIR}/trans_time >> cam_out_temp
+ ls -lt >> cam_out_temp
+ ${COPY} times ${CENTRALDIR}
+ else
+ echo "ERROR: either ic file $element or trans_time not available for trans_time"
+ exit 1
+ endif
+
+ # Create an initial CAM.nc file from the DART state vector
+ # Times are handled separately in trans_time
+ echo ' ' >> cam_out_temp
+ echo 'Executing trans_sv_pv' >> cam_out_temp
+ ${CENTRALDIR}/trans_sv_pv >> cam_out_temp
+ ls -ltR >> cam_out_temp
+
+ # advance cam
+ # echo executing: ${model:h}/run-cam.csh ${case}-$element $model ${CENTRALDIR} >> cam_out_temp
+ set retry = 0
+ while ($retry < $retry_max)
+ echo executing: ${CENTRALDIR}/run-cam.csh ${case}-$element $model ${CENTRALDIR} \
+ >> cam_out_temp
+ ${CENTRALDIR}/run-cam.csh ${case}-$element $model ${CENTRALDIR} >>& cam_out_temp
+
+ grep 'END OF MODEL RUN' cam_out_temp > /dev/null
+ if ($status == 0) then
+ set retry = $retry_max
+ # Extract the new state vector information from the new caminput.nc and
+ # put it in temp_ud (time followed by state)
+ echo ' ' >> cam_out_temp
+ echo 'Executing trans_pv_sv' >> cam_out_temp
+ ${CENTRALDIR}/trans_pv_sv >> cam_out_temp
+
+ # Move updated state vector and new CAM/CLM initial files back to experiment
+ # directory for use by filter and the next advance.
+ ${MOVE} temp_ud ${CENTRALDIR}/$output_file
+ ${MOVE} clminput.nc ${CENTRALDIR}/clminput_${element}.nc
+ ${MOVE} caminput.nc ${CENTRALDIR}/caminput_${element}.nc
+ ${MOVE} namelist ${CENTRALDIR}
+
+ echo "finished ${myname} for ens member $element at "`date` >> cam_out_temp
+ ${MOVE} cam_out_temp ${CENTRALDIR}/cam_out_temp$element
+ else
+ @ retry++
+ if ($retry < $retry_max) then
+ echo "WARNING - CAM $element stopped abnormally; will be retried"
+ echo "WARNING - CAM $element stopped abnormally; will be retried" >> cam_out_temp
+ echo "===========================================================" >> cam_out_temp
+ else
+ set DEADDIR = ${temp_dir}_dead
+ echo "WARNING - CAM $element stopped abnormally; see $DEADDIR"
+ echo "WARNING - CAM $element stopped abnormally; see $DEADDIR" >> cam_out_temp
+ ${COPY} cam_out_temp ${CENTRALDIR}/cam_out_temp${element}_died
+ mkdir $DEADDIR
+ ${MOVE} * $DEADDIR
+ exit -${element}
+ endif
+ endif
+ end
+
+ # if this process needs to advance more than one model, read the next set of
+ # filenames and ensemble number at the top of this loop.
+
+ @ state_copy++
+ @ ensemble_number_line = $ensemble_number_line + 3
+ @ input_file_line = $input_file_line + 3
+ @ output_file_line = $output_file_line + 3
+end
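For reference, the control file consumed three lines at a time in the loop above simply lists, for each ensemble member assigned to this process, the member number, the input file name, and the output file name. For a process advancing members 1 and 2 it might look like the following (the file names here are only illustrative; the real ones are whatever filter writes, e.g. the assim_model_state_ic/ud files that job.simple.csh later moves around):

1
assim_model_state_ic1
assim_model_state_ud1
2
assim_model_state_ic2
assim_model_state_ud2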
+
+cd ${CENTRALDIR}
+
+# lightning's filesystem was misbehaving if you removed the directory.
+# it was more reliable if you just left the directory empty. It's bogus, but true.
+${REMOVE} $temp_dir/*
+
+# Remove the filter_control file to signal completion
+# Is there a need for any sleeps to avoid trouble on completing moves here?
+\rm -rf $control_file
+
+
Property changes on: DART/trunk/models/am2/shell_scripts/advance_model.csh
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/shell_scripts/check_model.csh
===================================================================
--- DART/trunk/models/am2/shell_scripts/check_model.csh (rev 0)
+++ DART/trunk/models/am2/shell_scripts/check_model.csh 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,25 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL$
+# $Id: check_model.csh 2691 2007-03-11 18:18:09Z thoar $
+# $Revision$
+# $Date: 2007-03-11 11:18:09 -0700 (Sun, 11 Mar 2007) $
+
+if ($#argv < 1) then
+   echo "usage: check_model.csh num_ens_members"
+ exit
+endif
+
+set n = 1
+while ($n <= $1)
+ tail -40 cam_out_temp$n | grep 'END OF MODEL RUN' > /dev/null
+ if ($status != 0) echo cam_out_temp$n finished abnormally
+ @ n++
+end
+exit
Property changes on: DART/trunk/models/am2/shell_scripts/check_model.csh
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/shell_scripts/diags.csh
===================================================================
--- DART/trunk/models/am2/shell_scripts/diags.csh (rev 0)
+++ DART/trunk/models/am2/shell_scripts/diags.csh 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,89 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL$
+# $Id: diags.csh 2691 2007-03-11 18:18:09Z thoar $
+# $Revision$
+# $Date: 2007-03-11 11:18:09 -0700 (Sun, 11 Mar 2007) $
+
+# DART source directory on this machine
+
+set DART = ~/Pre-J/DART
+set exp_dir = `pwd`
+set innov_freq = 1
+
+if ($#argv == 0) then
+ echo
+ echo '1) Sets up environment for and runs the obs_diag program for'
+ echo ' observation space diagnostics in the $exp directory.'
+ echo '2) Runs the suite of matlab diagnostics.'
+ echo '3) Generates some innovation files and optionally (securely)'
+ echo 'copies the results to another machine.'
+ echo 'Takes several arguments, but must be edited to change'
+ echo 'other characteristics.'
+ echo
+   echo 'Usage: diags.csh last_obs_seq ps_dir [exp_mach:/directory]'
+ exit
+else if ($#argv == 3) then
+ set destin = $3
+endif
+
+set last_dir = $1
+set ps_dir = $2
+
+ln -s ../topog_file.nc topog_file.nc
+ln -s ../caminput.nc caminput.nc
+cp $last_dir/input.nml .
+vi input.nml
+$DART/models/cam/work/obs_diag >& diag.out
+
+# matlab batch job
+# addpath $DART/diagnostics/matlab
+# to do from home;
+# matlab -nodisplay >&! matlab.out << EOF
+# orig
+matlab >&! matlab.out << EOF
+fit_ens_mean_time
+fit_ens_spread_time
+obs_num_time
+obs_num_vertical
+fit_mean_spread_time
+fit_ens_mean_vertical
+fit_ens_bias_vertical
+exit
+EOF
+
+mkdir $ps_dir
+mv *.m *.dat *.ps input.nml $ps_dir
+tar c -f {$exp_dir:t}_${last_dir}_Diags.tar $ps_dir/*.ps $ps_dir/input.nml
+
+set n = 1
+set more = true
+while ($more == true)
+ set obs_seq = 01_0$n
+ if ($n > 9) set obs_seq = 01_$n
+ if ($obs_seq == $last_dir) set more = false
+
+ if (! -e $obs_seq/Innov.nc && $n % $innov_freq == 0) then
+ cd $obs_seq
+ innov
+ cd ..
+# tar r -f {$exp_dir:t}_${last_dir}_Diags.tar $obs_seq/*.nc
+# tar r -f {$exp_dir:t}_${last_dir}_Diags.tar $obs_seq/*.nc $obs_seq/input.nml
+ endif
+# orig
+ tar r -f {$exp_dir:t}_${last_dir}_Diags.tar $obs_seq/P*.nc
+ @ n++
+end
+
+if ($#argv == 3) then
+ scp {$exp_dir:t}_${last_dir}_Diags.tar $destin
+ scp ../explist $destin
+endif
+
+exit
Property changes on: DART/trunk/models/am2/shell_scripts/diags.csh
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/shell_scripts/job.simple.csh
===================================================================
--- DART/trunk/models/am2/shell_scripts/job.simple.csh (rev 0)
+++ DART/trunk/models/am2/shell_scripts/job.simple.csh 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,382 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL$
+# $Id: job.simple.csh 2691 2007-03-11 18:18:09Z thoar $
+# $Revision$
+# $Date: 2007-03-11 11:18:09 -0700 (Sun, 11 Mar 2007) $
+
+#-----------------------------------------------------------------------------
+# job.simple.csh ... Top level script to run a single assimilation experiment.
+#
+# Unlike the more complex job.csh, this script only processes a single
+# observation file. Still fairly complex; requires a raft of
+# data files and most of them are in hardcoded locations.
+#
+# You need to know which of several batch systems you are using. The most
+# common one is LSF; PBS is also common.  (POE is another, but it is not
+# supported directly by this script.)  Running a parallel cluster without a
+# batch system is not recommended, since the batch system schedules which
+# nodes are assigned to which processes, but it is possible: you have to do
+# more work to get the information about which nodes are involved to the
+# parallel tasks.  There is a section below that uses ssh and no batch system.
+#
+# How to submit this job:
+# 1. Look at the #BSUB or #PBS sections below and adjust any of the parameters
+# on your cluster. Queue names are very system specific; some systems
+# require wall-clock limits; some require an explicit charge code.
+# 2. Submit this script to the queue:
+# LSF: bsub < job.simple.csh
+# PBS: qsub job.simple.csh
+# NONE: job.simple.csh
+#
+# The script moves the necessary files to the current directory and then
+# starts 'filter' as a parallel job on all nodes; each of these tasks will
+# call a separate advance_model.csh when necessary.
+#
+# The central directory is where the scripts reside and where script and
+# program I/O are expected to happen.
+#-----------------------------------------------------------------------------
+#
+#=============================================================================
+# This block of directives constitutes the preamble for the LSF queuing system
+# LSF is used on the IBM Linux cluster 'lightning'
+# LSF is used on the IMAGe Linux cluster 'coral'
+# LSF is used on the IBM 'bluevista'
+# The queues on lightning and bluevista are supposed to be similar.
+#
+# the normal way to submit to the queue is: bsub < job.simple.csh
+#
+# an explanation of the most common directives follows:
+# -J Job name
+# -o STDOUT filename
+# -e STDERR filename
+# -P account
+# -q queue cheapest == [standby, economy, (regular,debug), premium] == $$$$
+# -n number of processors (really)
+# -W hr:mn max wallclock time (required on some systems)
+##=============================================================================
+#BSUB -J DARTCAM
+#BSUB -o DARTCAM.%J.log
+#BSUB -q regular
+#BSUB -n 1
+#
+#
+##=============================================================================
+## This block of directives constitutes the preamble for the PBS queuing system
+## PBS is used on the CGD Linux cluster 'bangkok'
+## PBS is used on the CGD Linux cluster 'calgary'
+##
+## the normal way to submit to the queue is: qsub job.simple.csh
+##
+## an explanation of the most common directives follows:
+## -N Job name
+## -r n Declare job non-rerunable
+## -e <arg> filename for standard error
+## -o <arg> filename for standard out
+## -q <arg> Queue name (small, medium, long, verylong)
+## -l nodes=xx:ppn=2 requests BOTH processors on the node. On both bangkok
+## and calgary, there is no way to 'share' the processors
+## on the node with another job, so you might as well use
+## them both. (ppn == Processors Per Node)
+##=============================================================================
+#PBS -N DARTCAM
+#PBS -r n
+#PBS -e DARTCAM.err
+#PBS -o DARTCAM.log
+#PBS -q medium
+#PBS -l nodes=2:ppn=2
+
+# A common strategy for the beginning is to check for the existence of
+# some variables that get set by the different queuing mechanisms.
+# This way, we know which queuing mechanism we are working with,
+# and can set 'queue-independent' variables for use for the remainder
+# of the script.
+
+if ($?LS_SUBCWD) then
+
+ # LSF has a list of processors already in a variable (LSB_HOSTS)
+
+ set CENTRALDIR = $LS_SUBCWD
+ set JOBNAME = $LSB_JOBNAME
+ alias submit 'mpirun.lsf \!*'
+
+else if ($?PBS_O_WORKDIR) then
+
+ # PBS has a list of processors in a file whose name is (PBS_NODEFILE)
+
+ set CENTRALDIR = $PBS_O_WORKDIR
+ set JOBNAME = $PBS_JOBNAME
+ alias submit 'mpirun \!*'
+
+else if ($?OCOTILLO_NODEFILE) then
+
+ # ocotillo is a 'special case'. It is the only cluster I know of with
+ # no queueing system. You must generate a list of processors in a
+ # file whose name is in $OCOTILLO_NODEFILE. For example ...
+ # setenv OCOTILLO_NODEFILE my_favorite_processors
+ # echo "node1" > $OCOTILLO_NODEFILE
+ # echo "node5" >> $OCOTILLO_NODEFILE
+ # echo "node7" >> $OCOTILLO_NODEFILE
+ # echo "node3" >> $OCOTILLO_NODEFILE
+
+ set CENTRALDIR = `pwd`
+ set JOBNAME = DARTCAM
+ # i think this is what we want, but csh will not let you do multiline
+ # executions; this argues for using ksh (line 2 below)... (and maybe
+ # it needs a cd as well?)
+ #alias submit 'foreach i ($OCOTILLO_NODEFILE) ; ssh $i csh \!* ; end'
+ #alias submit='for i in $OCOTILLO_NODEFILE ; do ssh $i (cd $CENTRALDIR; csh $*) ; done'
+ alias submit 'csh \!*'
+
+else
+
+ # interactive
+ # YOU need to know if you are using the PBS or LSF queuing
+ # system ... and set 'submit' accordingly.
+
+ set CENTRALDIR = `pwd`
+ set JOBNAME = DARTCAM
+ alias submit 'csh \!*'
+
+endif
+
+set myname = $0 # this is the name of this script
+
+# Set the experiment name.
+
+set experiment = CAM1X
+
+cd ${CENTRALDIR}
+
+# some systems don't like the -v option to any of the following
+
+set OSTYPE = `uname -s`
+switch ( ${OSTYPE} )
+ case IRIX64:
+ setenv REMOVE 'rm -rf'
+ setenv COPY 'cp -p'
+ setenv MOVE 'mv -f'
+ breaksw
+ case AIX:
+ setenv REMOVE 'rm -rf'
+ setenv COPY 'cp -p'
+ setenv MOVE 'mv -f'
+ breaksw
+ default:
+ setenv REMOVE 'rm -rvf'
+ setenv COPY 'cp -vp'
+ setenv MOVE 'mv -fv'
+ breaksw
+endsw
+
+echo " "
+echo "Running $JOBNAME on host "`hostname`
+echo "Initialized at "`date`
+echo "CENTRALDIR is "`pwd`
+
+#-----------------------------------------------------------------------------
+# Set variables containing various directory names where we will GET things
+#-----------------------------------------------------------------------------
+
+set DARTDIR = /home/coral/${user}/dart/DART
+set DARTCAMDIR = ${DARTDIR}/models/cam
+set CAMDATADIR = /fs/image/home/${user}/CAMDATA
+
+#-----------------------------------------------------------------------------
+# Get the DARTCAM executables and scripts
+#-----------------------------------------------------------------------------
+
+${COPY} ${DARTCAMDIR}/work/filter .
+${COPY} ${DARTCAMDIR}/work/assim_region .
+${COPY} ${DARTCAMDIR}/work/trans_date_to_dart .
+${COPY} ${DARTCAMDIR}/work/trans_pv_sv .
+${COPY} ${DARTCAMDIR}/work/trans_pv_sv_time0 .
+${COPY} ${DARTCAMDIR}/work/trans_sv_pv .
+${COPY} ${DARTCAMDIR}/work/trans_time .
+${COPY} ${DARTCAMDIR}/shell_scripts/advance_model.csh .
+${COPY} ${DARTCAMDIR}/shell_scripts/run-pc.csh .
+
+#-----------------------------------------------------------------------------
+# Get the necessary data files -- this is the hard part.
+# This script does not involve 'cold starting' CAM, nor spinning up DART.
+# The DARTics directory has one initial conditions file for
+# each ensemble member. We need one for each ...
+# The input.nml has a restart_in_file_name of 'filter_ic_old'
+# which must match the filename here.
+# Because that same namelist has 'single_restart_file_in' as .false.,
+# the restart_in_file_name gets an ensemble member number appended to it.
+#-----------------------------------------------------------------------------
+
+${COPY} ${CAMDATADIR}/input.nml .
+${COPY} ${CAMDATADIR}/obs_seq.out .
+
+# try to discover the ensemble size from the input.nml
+# this is some gory shell programming ... all to do 'something simple'
+
+grep ens_size input.nml >! ensstring.$$
+set STRING = "1,$ s#,##g"
+set ensstring = `sed -e "$STRING" ensstring.$$`
+set num_ens = $ensstring[3]
+
+${REMOVE} ensstring.$$
+
+echo "There are ${num_ens} ensemble members."
+
+# This copies just the initial conditions for the correct number
+# of ensemble members.
+
+set DARTics = /ptmp/raeder/CAM_init/T21x80/03-01-01/DART_lunes
+
+set n = 1
+while($n <= ${num_ens})
+ set from = ${DARTics}/filter_ic*[.0]$n
+ ${COPY} $from filter_ic_old.$from:e
+ @ n++
+end
+
+${COPY} ${CAMDATADIR}/namelistin .
+${COPY} ${CAMDATADIR}/caminput.nc .
+${COPY} ${CAMDATADIR}/clminput.nc .
+set CAMics = /ptmp/raeder/CAM_init/T21x80/03-01-01/CAM/caminput_
+set CLMics = /ptmp/raeder/CAM_init/T21x80/03-01-01/CLM/clminput_
+
+#-----------------------------------------------------------------------------
+# T21
+# inflate_1_ic is the wrong size, but I need to set it to something
+# The CAMsrc directory is MORE than just the location of the executable.
+# There are more support widgets expected in the directory tree.
+#-----------------------------------------------------------------------------
+
+# set inflate_1_ic = ../Pre-J/Exp4/01_62/DART
+
+set CAMsrc = /home/coral/raeder/Cam3/cam3.1/models/atm/cam/bld/T21-O2
+
+#-----------------------------------------------------------------------------
+# Ensure the (output) experiment directory exists
+# All the CAM-related files will get put in ${experiment}/CAM
+# All the CLM-related files will get put in ${experiment}/CLM
+# All the DART-related files will get put in ${experiment}/DART
+#-----------------------------------------------------------------------------
+
+if (-d ${experiment}) then
+ echo "${experiment} already exists"
+else
+ echo "Making run-time directory ${experiment} ..."
+ mkdir -p ${experiment}
+endif
+mkdir -p ${experiment}/{CLM,CAM,DART}
+
+#-----------------------------------------------------------------------------
+# This is where I should check to make sure all the required files exist.
+#-----------------------------------------------------------------------------
+
+if (! -e namelistin ) then
+ echo "ERROR ... need a namelistin file."
+ exit 99
+endif
+
+#-----------------------------------------------------------------------------
+# get name of file containing PHIS from the CAM namelist. This will be used by
+# static_init_model to read in the PHIS field, which is used for height obs.
+#-----------------------------------------------------------------------------
+ grep bnd_topo namelistin >! topo_file
+ set STRING = "1,$ s#'##g"
+ set ensstring = `sed -e "$STRING" topo_file`
+ set topo_name = $ensstring[3]
+# ln -s $topo_name topog_file.nc
+ cp $topo_name topog_file.nc
+ chmod 644 topog_file.nc
+ ${REMOVE} topo_file
+
+#-----------------------------------------------------------------------------
+# Some information about CAM must be made available to advance_model.csh
+# filter_server.csh spawns advance_ens.csh which spawns advance_model.csh
+# 'casemodel' is required (by advance_model.csh) to be in the Central directory
+#-----------------------------------------------------------------------------
+
+echo "${experiment} ${CAMsrc} ${CAMics} ${CLMics}" >! casemodel
+
+#-----------------------------------------------------------------------------
+# Runs filter which integrates the results of model advances (async=2).
+#
+# A 20 member ensemble @ T21 can take anywhere between 10-30 minutes.
+#-----------------------------------------------------------------------------
+
+submit filter
+
+#-----------------------------------------------------------------------------
+# When filter.f90 finishes, it creates a file called 'go_end_filter' in this
+# runtime directory (i.e. CENTRALDIR). The existence of 'go_end_filter' is
+# enough to signal filter_server.csh that it is time to end.
+#
+# filter_server.csh is in an infinite loop looking for the existence
+# of any of three files:
+# go_advance_model (time to advance the ensemble members)
+# go_assim_regions (time to assimilate the observations)
+# go_end_filter (time to end)
+#-----------------------------------------------------------------------------
+
+echo "Finished at "`date`
+
+#-----------------------------------------------------------------------------
+# Move the output to storage after filter completes.
+# At this point, all the restart,diagnostic files are in the CENTRALDIR
+# and need to be moved to the 'experiment permanent' directory.
+# We have had problems with some, but not all, files being moved
+# correctly, so we are adding bulletproofing to check to ensure the filesystem
+# has completed writing the files, etc. Sometimes we get here before
+# all the files have finished being written.
+#-----------------------------------------------------------------------------
+
+echo "Listing contents of CENTRALDIR before archiving"
+ls -l
+
+${MOVE} clminput_[1-9]*.nc ${experiment}/CLM
+
+${MOVE} cam_out_temp[1-9]* ${experiment}/CAM
+${MOVE} caminput_[1-9]*.nc ${experiment}/CAM
+
+${MOVE} filter_ic_old* ${experiment}/DART
+${MOVE} filter_ic_new* ${experiment}/DART
+${MOVE} assim_model_state_ud[1-9]* ${experiment}/DART
+${MOVE} assim_model_state_ic[1-9]* ${experiment}/DART
+#${MOVE} inflate_ic_new ${experiment}/DART
+#${MOVE} filter_control ${experiment}/DART
+#${MOVE} run_job.log ${experiment}/DART # filter_server runtime log
+${MOVE} Posterior_Diag.nc ${experiment}/DART
+${MOVE} Prior_Diag.nc ${experiment}/DART
+${MOVE} obs_seq.final ${experiment}/DART
+${MOVE} dart_log.out ${experiment}/DART
+
+${COPY} namelistin ${experiment}
+${MOVE} namelist ${experiment}
+${MOVE} casemodel ${experiment}
+
+# Good style dictates that you save the scripts so you can see what worked.
+
+${COPY} input.nml ${experiment}
+${COPY} *.csh ${experiment}
+${COPY} $myname ${experiment}
+
+# CAM leaves a bunch of remnants in your $HOME directory.
+# I have not figured out how to use them ... so I clean up.
+
+${REMOVE} ~/lnd.*.rpointer topog_file.nc
+
+ls -lrt
+
+echo "Depending on when filter_server.csh finishes, you may wind up"
+echo "with a couple files called filter_server.xxxx.[log,err]"
+echo "You could/should move them to ${experiment}/DART"
+echo "filter_server.csh will also remove the semaphor file go_end_filter,"
+echo "so do not remove it. If it still exists after filter_server has completed"
+echo "something is wrong ..."
+echo "Cheers."
Property changes on: DART/trunk/models/am2/shell_scripts/job.simple.csh
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/tester.f90
===================================================================
--- DART/trunk/models/am2/tester.f90 (rev 0)
+++ DART/trunk/models/am2/tester.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,27 @@
+program tester
+
+ implicit none
+ real*8, dimension(:,:,:), allocatable :: val, rval
+ integer :: i, j, k
+
+ allocate(val(2,3,4))
+
+ do i = 1, 2
+ do j = 1, 3
+ do k = 1, 4
+ val(i,j,k) = 2*i+3*j+4*k
+ end do
+ end do
+ end do
+
+ print *, val(1,:,:)
+ print *, val(2,:,:)
+
+ allocate(rval(4,2,3))
+
+ rval = reshape(val, (/4, 2, 3/), order = (/3,1,2/))
+
+ print *, rval(:,1,:)
+ print *, rval(:,2,:)
+
+end program tester
Property changes on: DART/trunk/models/am2/tester.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/trans_date_to_dart.f90
===================================================================
--- DART/trunk/models/am2/trans_date_to_dart.f90 (rev 0)
+++ DART/trunk/models/am2/trans_date_to_dart.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,73 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_date_to_dart
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_date_to_dart.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $
+
+!----------------------------------------------------------------------
+! purpose: generate a Gregorian/DART date & time from standard date and time
+!
+! method: Read ASCII input(/output) file containing yyyy/mm/dd hh:mm:ss .
+! Reform time and date into form needed by DART.
+! Write out CAM time and date to i/o file for use in input.nlm .
+!
+! author: Kevin Raeder 8/18/03
+!
+!----------------------------------------------------------------------
+
+use utilities_mod, only : get_unit, initialize_utilities, finalize_utilities
+use time_manager_mod, only : time_type, write_time, &
+ get_time, set_time, get_date, set_date, &
+ set_calendar_type, GREGORIAN, get_calendar_type
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $"
+
+integer :: calendar_type = GREGORIAN
+integer :: file_unit, seconds, &
+ year, month, day, hour, minute, second, &
+ cam_date, cam_tod
+type(time_type) :: dart_time
+character (len = 128) :: file_name = 'date_greg'
+
+call initialize_utilities('Trans_date_to_dart')
+
+call set_calendar_type(calendar_type)
+! debug
+seconds = get_calendar_type()
+PRINT*,'calendar type = ',seconds
+
+file_unit = get_unit()
+PRINT*,'file_unit = ',file_unit
+
+! read in date and time
+open(unit = file_unit, file = file_name, status='old',form='formatted')
+read(file_unit, '(I4,5(1X,I2))') year, month, day, hour, minute, second
+PRINT*,'read in date = ',year, month, day, hour, minute, second
+
+! create and write DART date (Gregorian)
+dart_time = set_date(year, month, day, hour, minute, second)
+call write_time (file_unit,dart_time)
+
+! create and write CAM date
+cam_date = (year)*10000 + month*100 + day
+cam_tod = hour*3600 + minute*60 + second
+write (file_unit,'(2I8)') cam_date, cam_tod
+
+close(unit = file_unit)
+
+call finalize_utilities()
+
+end program trans_date_to_dart
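The two integers written at the end are the date encodings CAM expects: cam_date packs the Gregorian date as yyyymmdd (year*10000 + month*100 + day) and cam_tod is seconds since midnight. For example, 2003/08/18 12:30:00 becomes cam_date = 20030818 and cam_tod = 12*3600 + 30*60 + 0 = 45000. A minimal sketch of just that arithmetic:

program cam_date_sketch
   implicit none
   integer :: year, month, day, hour, minute, second
   integer :: cam_date, cam_tod

   year = 2003; month = 8; day = 18
   hour = 12;   minute = 30; second = 0

   cam_date = year*10000 + month*100 + day     ! 20030818
   cam_tod  = hour*3600 + minute*60 + second   ! 45000
   write(*, '(2I8)') cam_date, cam_tod
end program cam_date_sketch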
Property changes on: DART/trunk/models/am2/trans_date_to_dart.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/trans_pv_sv.f90
===================================================================
--- DART/trunk/models/am2/trans_pv_sv.f90 (rev 0)
+++ DART/trunk/models/am2/trans_pv_sv.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,124 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_pv_sv
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_pv_sv.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $
+
+!----------------------------------------------------------------------
+! purpose: interface between CAM and DART
+!
+! method: Read CAM 'initial' file for model state, but not time (netCDF format).
+! Get target time from assim_model_state_ic (temp_ic).
+! Reform fields into a state vector.
+! Write out state vector in "proprietary" format for DART
+!
+! author: Kevin Raeder 2/21/03
+! based on prog_var_to_vector and vector_to_prog_var by Jeff Anderson
+!
+!----------------------------------------------------------------------
+
+use types_mod, only : r8
+use utilities_mod, only : get_unit, file_exist, &
+ initialize_utilities, finalize_utilities
+use model_mod, only : model_type, init_model_instance, end_model_instance, &
+ prog_var_to_vector, read_model_init
+use assim_model_mod, only : assim_model_type, static_init_assim_model, &
+ init_assim_model, get_model_size , set_model_state_vector, write_state_restart, &
+ set_model_time, open_restart_read, open_restart_write, close_restart, &
+ aread_state_restart
+! Guam; move time stripping from advance_model to here
+use time_manager_mod, only : time_type, read_time, set_time
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $"
+
+! character (len = 128) :: file_name = 'caminput.nc', file_out = 'temp_ic'
+! Guam; move time stripping from script into here
+character (len = 128) :: file_name = 'fv_rst.res.nc', file_out = 'temp_ud', &
+ file_time = 'temp_ic'
+
+!----------------------------------------------------------------------
+
+! Temporary allocatable storage to read in a native format for cam state
+type(assim_model_type) :: x
+type(model_type) :: var
+type(time_type) :: model_time, adv_to_time
+real(r8), allocatable :: x_state(:)
+integer :: file_unit, x_size
+logical :: do_output = .false.
+
+call initialize_utilities('Trans_pv_sv')
+
+if(file_exist('element1')) do_output = .true.
+
+! Static init assim model calls static_init_model
+! which now (merge/MPI) calls read_model_init)
+call static_init_assim_model()
+
+! Initialize the assim_model instance
+call init_assim_model(x)
+
+! Allocate the local state vector
+x_size = get_model_size()
+allocate(x_state(x_size))
+
+! Allocate the instance of the cam model type for storage
+! Nancy; why did we comment this out?
+! Do it in read_model_init?
+! What about end_model_instance?
+! I'll just point to the space I need, not;
+call init_model_instance(var)
+
+print *, associated(var%delp)
+print *, associated(var%ps)
+
+! Read the file AM2 state fragments into var, but not time
+call read_model_init(file_name, var)
+
+print *, associated(var%delp)
+print *, associated(var%ps)
+
+
+! transform fields into state vector for DART
+call prog_var_to_vector(var, x_state)
+
+call end_model_instance(var)
+
+! Put this in the structure
+call set_model_state_vector(x, x_state)
+
+! Integration of model was controlled by the restart file,
+! so we use the target time of the restart file (from assim_model_state)
+! as the current model state time.
+!file_unit = open_restart_read(file_time)
+! We're done with x_state, so it can be uselessly filled in aread_state_restart,
+! while getting model_time.
+!call aread_state_restart(model_time, x_state, file_unit, adv_to_time)
+
+adv_to_time = set_time(0,0)
+
+call set_model_time (x, adv_to_time)
+
+!call close_restart(file_unit)
+
+! Get channel for output,
+! write out state vector in "proprietary" format
+file_unit = open_restart_write(file_out)
+call write_state_restart(x, file_unit)
+call close_restart(file_unit)
+
+call finalize_utilities()
+
+end program trans_pv_sv
Property changes on: DART/trunk/models/am2/trans_pv_sv.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/trans_pv_sv_pert0.f90
===================================================================
--- DART/trunk/models/am2/trans_pv_sv_pert0.f90 (rev 0)
+++ DART/trunk/models/am2/trans_pv_sv_pert0.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,161 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_pv_sv_pert0
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_pv_sv_time0.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-25 22:09:04 -0600 (Sun, 25 Mar 2007) $
+
+!----------------------------------------------------------------------
+! purpose: interface between CAM and DART,
+! Modify field (esp. parameter field) values to have spread
+!
+! method: Read CAM 'initial' file (netCDF format).
+! Use ensemble member (in file 'element') to set a unique seed for init_ran1.
+! Change values by call to model_mod:pert_model_state.
+! Reform fields into a state vector.
+! Write out state vector in "proprietary" format for DART
+!
+! author: Kevin Raeder 3/21/07
+! based on trans_pv_sv_time0
+!
+!----------------------------------------------------------------------
+
+use types_mod, only : r8
+use utilities_mod, only : logfileunit, error_handler, E_ERR, E_MSG, find_namelist_in_file, &
+ check_namelist_read, initialize_utilities, finalize_utilities
+use model_mod, only : model_type, init_model_instance, read_model_init, &
+ prog_var_to_vector, pert_model_state
+
+use assim_model_mod, only : assim_model_type, static_init_assim_model, &
+ init_assim_model, get_model_size , set_model_state_vector, write_state_restart, &
+ set_model_time, open_restart_read, open_restart_write, close_restart, &
+ aread_state_restart
+! Guam; move time stripping from advance_model to here
+use time_manager_mod, only : time_type, read_time, set_time
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-25 22:09:04 -0600 (Sun, 25 Mar 2007) $"
+
+character (len = 128) :: file_name = 'caminput.nc', file_out = 'temp_ud'
+! Hawaii; file_time = 'temp_ic'
+! trans_pv_sv_pert0 should get its time from the namelist,
+! not from temp_ic, which came from filter_ics, which will not exist for a new
+! set of fields comprising the state vector.
+
+! Temporary allocatable storage to read in a native format for cam state
+type(assim_model_type) :: x
+type(model_type) :: var
+type(time_type) :: model_time
+real(r8), allocatable :: x_state(:), x_temp(:)
+integer :: file_unit, x_size, iunit, io
+
+! define exactly the same stuff as in the perfect_model_obs namelist.
+logical :: start_from_restart = .false., output_restart = .false., &
+ interf_provided
+integer :: async = 0
+integer :: init_time_days = 0, init_time_seconds = 0, output_interval = 1, &
+ first_obs_days, first_obs_seconds, &
+ last_obs_days, last_obs_seconds
+character(len = 129) :: restart_in_file_name = 'perfect_ics', &
+ restart_out_file_name = 'perfect_restart', &
+ obs_seq_in_file_name = 'obs_seq.in', &
+ obs_seq_out_file_name = 'obs_seq.out', &
+ adv_ens_command = './advance_model.csh'
+
+namelist /perfect_model_obs_nml/ &
+ start_from_restart, output_restart, async, &
+ init_time_days, first_obs_days, first_obs_seconds, last_obs_days, &
+ last_obs_seconds,init_time_seconds, output_interval, &
+ restart_in_file_name, restart_out_file_name, &
+ obs_seq_in_file_name, obs_seq_out_file_name, &
+ adv_ens_command
+
+!-------------------------
+
+call initialize_utilities('trans_pv_sv_pert0')
+
+! Static init assim model calls static_init_model
+PRINT*,'static_init_assim_model in trans_pv_sv_pert0'
+call static_init_assim_model()
+
+! Initialize the assim_model instance
+call init_assim_model(x)
+
+! Allocate the local state vector
+x_size = get_model_size()
+allocate(x_state(x_size), x_temp(x_size))
+
+! Allocate the instance of the cam model type for storage
+call init_model_instance(var)
+
+! Read the file cam state fragments into var
+call read_model_init(file_name, var)
+
+! transform fields into state vector for DART
+call prog_var_to_vector(var, x_state)
+
+! Replace mean/base value and/or add spread to (parameter) fields
+! according to the namelist entries pert_names, pert_sd, and pert_base_vals.
+! Random value (< pert_sd) will be added to pert_base_vals.
+! The seed for the random # will be derived from the ensemble member #,
+! so it should be unique for each ensemble member (and variable).
+! (This isn't always true for some random number generators).
+
+call pert_model_state(x_state, x_temp, interf_provided)
+
+! Put this in the structure
+call set_model_state_vector(x, x_temp)
+
+! Read the namelist entry
+call find_namelist_in_file("input.nml", "perfect_model_obs_nml", iunit)
+read(iunit, nml = perfect_model_obs_nml, iostat = io)
+call check_namelist_read(iunit, io, "perfect_model_obs_nml")
+
+! Record the namelist values used for the run ...
+call error_handler(E_MSG,'trans_pv_sv_pert0','perfect_model_obs_nml values are',' ',' ',' ')
+write(logfileunit, nml=perfect_model_obs_nml)
+write( * , nml=perfect_model_obs_nml)
+
+
+call filter_set_initial_time
+
+call set_model_time (x, model_time)
+
+! Get channel for output
+file_unit = open_restart_write(file_out)
+PRINT*,'In trans_pv_sv_pert0 file_out unit = ',file_unit
+PRINT*,' '
+! write out state vector in "proprietary" format
+call write_state_restart(x, file_unit)
+call close_restart(file_unit)
+
+call finalize_utilities()
+
+!-------------------------------------------------------------------------
+contains
+!-------------------------------------------------------------------------
+
+subroutine filter_set_initial_time
+
+if(init_time_days >= 0) then
+ model_time = set_time(init_time_seconds, init_time_days)
+else
+ model_time = set_time(0, 0)
+endif
+
+end subroutine filter_set_initial_time
+
+end program trans_pv_sv_pert0
+
Property changes on: DART/trunk/models/am2/trans_pv_sv_pert0.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
Added: DART/trunk/models/am2/trans_pv_sv_time0.f90
===================================================================
--- DART/trunk/models/am2/trans_pv_sv_time0.f90 (rev 0)
+++ DART/trunk/models/am2/trans_pv_sv_time0.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,150 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_pv_sv_time0
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_pv_sv_time0.f90 2806 2007-04-06 20:15:09Z thoar $
+! $Revision$
+! $Date: 2007-04-06 16:15:09 -0400 (Fri, 06 Apr 2007) $
+
+!----------------------------------------------------------------------
+! purpose: interface between CAM and DART
+!
+! method: Read CAM 'initial' file (netCDF format).
+! Reform fields into a state vector.
+! Write out state vector in "proprietary" format for DART
+!
+! author: Kevin Raeder 2/21/03
+! based on prog_var_to_vector and vector_to_prog_var by Jeff Anderson
+!
+!----------------------------------------------------------------------
+
+use types_mod, only : r8
+use utilities_mod, only : logfileunit, error_handler, E_ERR, E_MSG, find_namelist_in_file, &
+ check_namelist_read, initialize_utilities, finalize_utilities
+use model_mod, only : model_type, init_model_instance, read_model_init, &
+ prog_var_to_vector
+
+use assim_model_mod, only : assim_model_type, static_init_assim_model, &
+ init_assim_model, get_model_size , set_model_state_vector, write_state_restart, &
+ set_model_time, open_restart_read, open_restart_write, close_restart, &
+ aread_state_restart
+! Guam; move time stripping from advance_model to here
+use time_manager_mod, only : time_type, read_time, set_time
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-04-06 16:15:09 -0400 (Fri, 06 Apr 2007) $"
+
+character (len = 128) :: file_name = 'caminput.nc', file_out = 'temp_ud'
+! Hawaii; file_time = 'temp_ic'
+! trans_pv_sv_time0 should get its time from the namelist,
+! not from temp_ic, which came from filter_ics, which will not exist for a new
+! set of fields comprising the state vector.
+
+! Temporary allocatable storage to read in a native format for cam state
+type(assim_model_type) :: x
+type(model_type) :: var
+type(time_type) :: model_time
+real(r8), allocatable :: x_state(:), x_temp(:)
+integer :: file_unit, x_size, iunit, io
+
+! define exactly the same stuff as in the perfect_model_obs namelist.
+logical :: start_from_restart = .false., output_restart = .false., &
+ interf_provided
+integer :: async = 0
+integer :: init_time_days = 0, init_time_seconds = 0, output_interval = 1, &
+ first_obs_days, first_obs_seconds, &
+ last_obs_days, last_obs_seconds
+character(len = 129) :: restart_in_file_name = 'perfect_ics', &
+ restart_out_file_name = 'perfect_restart', &
+ obs_seq_in_file_name = 'obs_seq.in', &
+ obs_seq_out_file_name = 'obs_seq.out', &
+ adv_ens_command = './advance_model.csh'
+
+namelist /perfect_model_obs_nml/ &
+ start_from_restart, output_restart, async, &
+ init_time_days, first_obs_days, first_obs_seconds, last_obs_days, &
+ last_obs_seconds,init_time_seconds, output_interval, &
+ restart_in_file_name, restart_out_file_name, &
+ obs_seq_in_file_name, obs_seq_out_file_name, &
+ adv_ens_command
+
+!-------------------------
+
+call initialize_utilities('Trans_pv_sv_time0')
+
+! Static init assim model calls static_init_model
+PRINT*,'static_init_assim_model in trans_pv_sv_time0'
+call static_init_assim_model()
+
+! Initialize the assim_model instance
+call init_assim_model(x)
+
+! Allocate the local state vector
+x_size = get_model_size()
+allocate(x_state(x_size), x_temp(x_size))
+
+! Allocate the instance of the cam model type for storage
+call init_model_instance(var)
+
+! Read the file cam state fragments into var
+call read_model_init(file_name, var)
+
+! transform fields into state vector for DART
+call prog_var_to_vector(var, x_state)
+
+! Put this in the structure
+call set_model_state_vector(x, x_state)
+
+! Read the namelist entry
+call find_namelist_in_file("input.nml", "perfect_model_obs_nml", iunit)
+read(iunit, nml = perfect_model_obs_nml, iostat = io)
+call check_namelist_read(iunit, io, "perfect_model_obs_nml")
+
+! Record the namelist values used for the run ...
+call error_handler(E_MSG,'trans_pv_sv_time0','perfect_model_obs_nml values are',' ',' ',' ')
+write(logfileunit, nml=perfect_model_obs_nml)
+write( * , nml=perfect_model_obs_nml)
+
+
+call filter_set_initial_time
+
+call set_model_time (x, model_time)
+
+! Get channel for output
+file_unit = open_restart_write(file_out)
+PRINT*,'In trans_pv_sv_time0 file_out unit = ',file_unit
+PRINT*,' '
+! write out state vector in "proprietary" format
+call write_state_restart(x, file_unit)
+call close_restart(file_unit)
+
+call finalize_utilities()
+
+!-------------------------------------------------------------------------
+contains
+!-------------------------------------------------------------------------
+
+subroutine filter_set_initial_time
+
+if(init_time_days >= 0) then
+ model_time = set_time(init_time_seconds, init_time_days)
+else
+ model_time = set_time(0, 0)
+endif
+
+end subroutine filter_set_initial_time
+
+!-------------------------------------------------------------------------
+end program trans_pv_sv_time0
+
Property changes on: DART/trunk/models/am2/trans_pv_sv_time0.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
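A note on filter_set_initial_time above: DART's set_time takes its arguments as (seconds, days), which is why the call reads set_time(init_time_seconds, init_time_days). The minimal sketch below is illustrative only and not part of this commit; it assumes the same time_manager_mod used by the programs above and reuses the init_time_days value that appears in the work/input.nml added later in this change set.

  program set_time_order_example
  ! Illustrative sketch only: shows the (seconds, days) argument order
  ! that filter_set_initial_time relies on.
  use time_manager_mod, only : time_type, set_time, get_time
  implicit none

  type(time_type) :: model_time
  integer         :: seconds, days

  ! mirrors init_time_seconds = 0, init_time_days = 139338 from work/input.nml
  model_time = set_time(0, 139338)      ! seconds first, then days

  call get_time(model_time, seconds, days)
  write(*, *) 'model_time is ', days, ' days and ', seconds, ' seconds'

  end program set_time_order_example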
Added: DART/trunk/models/am2/trans_sv_pv.f90
===================================================================
--- DART/trunk/models/am2/trans_sv_pv.f90 (rev 0)
+++ DART/trunk/models/am2/trans_sv_pv.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,113 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_sv_pv
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_sv_pv.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $
+
+!----------------------------------------------------------------------
+! purpose: interface between CAM and DART
+!
+! method: Read DART state vector ("proprietary" format), but not time(s).
+! Reform state vector back into CAM fields.
+! Replace those fields on the CAM initial file with the new values,
+! preserving all other information on the file.
+!
+! author: Kevin Raeder 2/21/03
+! based on prog_var_to_vector and vector_to_prog_var by Jeff Anderson
+! mod: to read temp_ic (assim_model_state_ic; 2 times) or temp_ud (1 time) and put
+! the fields into the CAM initial file
+!
+!----------------------------------------------------------------------
+
+use types_mod, only : r8
+use utilities_mod, only : get_unit, file_exist, open_file, &
+ initialize_utilities, finalize_utilities
+use model_mod, only : model_type, init_model_instance, write_model_init, &
+ vector_to_prog_var
+use assim_model_mod, only : assim_model_type, static_init_assim_model, &
+ init_assim_model, get_model_size, get_model_state_vector, read_state_restart, &
+ open_restart_read, close_restart
+use time_manager_mod, only : time_type, read_time
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $"
+
+type(assim_model_type) :: x
+type(model_type) :: var
+! Guam clean out advance_model
+type(time_type) :: adv_to_time
+real(r8), allocatable :: x_state(:)
+integer :: file_unit, mem_unit, x_size
+character (len = 128) :: file_name, file_in
+logical :: do_output = .false.
+
+call initialize_utilities('Trans_sv_pv')
+
+if(file_exist('element1')) do_output = .true.
+
+! Static init assim model calls static_init_model
+if (do_output) then
+ WRITE(*,'(////A)') '========================================================================='
+ PRINT*,'static_init_assim_model in trans_sv_pv'
+endif
+call static_init_assim_model()
+call init_assim_model(x)
+
+! Allocate the instance of the cam model type for storage
+call init_model_instance(var)
+
+if (file_exist( 'temp_ic' )) then
+ file_in = 'temp_ic'
+ file_name = 'caminput.nc'
+ ! Get file for DART vector input
+ file_unit = open_restart_read(file_in)
+   ! Read in the target time and state vector from DART; the time(s) are
+   ! discarded here since they are handled by trans_time.f90
+ call read_state_restart(x, file_unit, adv_to_time)
+else if (file_exist( 'member' )) then
+ mem_unit = open_file ('member')
+ read(mem_unit,'(A)') file_in
+ read(mem_unit,'(A)') file_name
+ PRINT*,' file_in = ',file_in
+ file_unit = open_restart_read(file_in)
+ ! read state vector from DART and throw away the time
+ call read_state_restart(x, file_unit)
+endif
+
+! Note: these assignments override any file_in/file_name chosen above; this
+! AM2 version reads the DART vector from temp_ud and writes post_fv_rst.res.nc.
+file_in = 'temp_ud'
+file_name = 'post_fv_rst.res.nc'
+file_unit = open_restart_read(file_in)
+call read_state_restart(x, file_unit)
+
+call close_restart(file_unit)
+
+! Get the state part of the assim_model type x
+x_size = get_model_size()
+allocate(x_state(x_size))
+x_state = get_model_state_vector(x)
+
+! decompose vector back into AM2 fields
+
+call vector_to_prog_var (x_state, var)
+deallocate (x_state)
+
+! write fields to the netCDF initial file
+! merge/MPI; this requires no change; a CAM state will exist in model_mod,
+! but this will ignore it and write out *this* CAM state.
+call write_model_init(file_name, var)
+
+call finalize_utilities()
+
+end program trans_sv_pv
Property changes on: DART/trunk/models/am2/trans_sv_pv.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
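For reference, the 'member' branch in trans_sv_pv expects a two-line control file: the first line names the DART restart/vector file to read, the second names the model initial file to update (although, as noted in the code, this AM2 version then overrides both names with temp_ud and post_fv_rst.res.nc). A hypothetical 'member' file, with placeholder names used purely for illustration:

  temp_ic1
  caminput_1.nc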
Added: DART/trunk/models/am2/trans_time.f90
===================================================================
--- DART/trunk/models/am2/trans_time.f90 (rev 0)
+++ DART/trunk/models/am2/trans_time.f90 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,102 @@
+! Data Assimilation Research Testbed -- DART
+! Copyright 2004-2007, Data Assimilation Research Section
+! University Corporation for Atmospheric Research
+! Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+
+program trans_time
+
+! <next few lines under version control, do not edit>
+! $URL$
+! $Id: trans_time.f90 2713 2007-03-26 04:09:04Z thoar $
+! $Revision$
+! $Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $
+
+!----------------------------------------------------------------------
+! purpose: interface between CAM and DART time and date
+!
+! method: Read DART 'state vector' file (proprietary format).
+! Reform time and date into form needed by CAM.
+! Write out CAM time and date to file for use by run-pc.csh
+!
+! author: Kevin Raeder 8/1/03
+!
+!----------------------------------------------------------------------
+
+use time_manager_mod, only : time_type, read_time, write_time, &
+ get_time, set_time, operator(-), get_date, &
+ set_calendar_type, GREGORIAN, NOLEAP
+use assim_model_mod, only : static_init_assim_model, init_assim_model, open_restart_read, close_restart, &
+ get_model_size, get_model_time, read_state_restart, assim_model_type
+use utilities_mod, only : get_unit, initialize_utilities, finalize_utilities
+use types_mod, only : r8
+
+implicit none
+
+! version controlled file description for error handling, do not edit
+character(len=128), parameter :: &
+ source = "$URL$", &
+ revision = "$Revision$", &
+ revdate = "$Date: 2007-03-26 00:09:04 -0400 (Mon, 26 Mar 2007) $"
+
+integer :: ntimes = 2, n, nhtfrq, calendar_type = GREGORIAN
+integer :: file_unit(2), year, month, day, hour, minute, second, &
+ cam_date, cam_tod
+type(time_type) :: dart_time(2), forecast_length
+character (len = 128) :: file_name = 'temp_ic', file_out = 'times'
+type(assim_model_type) :: x
+
+call initialize_utilities('Trans_time')
+
+call set_calendar_type(calendar_type)
+
+! Static init assim model calls static_init_model
+call static_init_assim_model()
+call init_assim_model(x)
+
+file_unit(1) = open_restart_read(file_name)
+call read_state_restart(x, file_unit(1), dart_time(1))
+dart_time(2) = get_model_time(x)
+
+file_unit(2) = get_unit()
+open(unit = file_unit(2), file = file_out)
+! end time is first, then beginning time
+! -namelist "&camexp START_YMD=$times[3] START_TOD=$times[4] \
+! STOP_YMD=$times[1] STOP_TOD=$times[2] NHTFRQ=$times[5] /" \
+
+do n=1,ntimes
+ call get_date(dart_time(n), year, month, day, hour, minute, second)
+ PRINT*,'date = ',year, month, day, hour, minute, second
+ if (calendar_type.eq.GREGORIAN) then
+ cam_date = year*10000 + month*100 + day
+ cam_tod = hour*3600 + minute*60 + second
+ elseif (calendar_type.eq.NOLEAP) then
+ cam_date = (1899 + year)*10000 + month*100 + day
+ cam_tod = hour*3600 + minute*60 + second
+ endif
+ write (file_unit(2),'(2I8)') cam_date, cam_tod
+enddo
+
+call close_restart(file_unit(1))
+
+! calculate number of hours in forecast, and pass to history tape write frequency
+
+forecast_length = dart_time(1) - dart_time(2)
+
+call get_time(forecast_length, second, day)
+PRINT*,'forecast length = ', day, second
+hour = second/3600
+minute = mod(second,3600)
+if (minute.ne.0) &
+   print*,' forecast length is not a whole number of hours; nhtfrq error in trans_time'
+
+! convert to hours, and negative to signal units are hours
+
+! nhtfrq = -1*((((year-1)*365 + (month-1))*30 + (day-1))*24 + hour)
+nhtfrq = -1*(day*24 + hour)
+write (file_unit(2),'(I8)') nhtfrq
+
+close(file_unit(2))
+
+call finalize_utilities()
+
+end program trans_time
Property changes on: DART/trunk/models/am2/trans_time.f90
___________________________________________________________________
Name: svn:keywords
+ "Date Rev Author URL Id"
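To make the date encoding in trans_time concrete: for a Gregorian time of, say, 2003-01-01 06:00:00 (the same date used as first_bin_center in the obs_diag namelist later in this commit), the loop above writes

  cam_date = 2003*10000 + 1*100 + 1 = 20030101
  cam_tod  = 6*3600 + 0*60 + 0      = 21600

and a 6-hour forecast (forecast_length of 0 days, 21600 seconds) yields nhtfrq = -(0*24 + 6) = -6, the minus sign signalling, per the comment in the code, that the history write frequency is in hours.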
Added: DART/trunk/models/am2/work/atmos_coupled.res.nc
===================================================================
(Binary files differ)
Property changes on: DART/trunk/models/am2/work/atmos_coupled.res.nc
___________________________________________________________________
Name: svn:mime-type
+ application/x-netcdf
Added: DART/trunk/models/am2/work/atmos_tracers.res.nc
===================================================================
(Binary files differ)
Property changes on: DART/trunk/models/am2/work/atmos_tracers.res.nc
___________________________________________________________________
Name: svn:mime-type
+ application/x-netcdf
Added: DART/trunk/models/am2/work/fv_rst.res.nc
===================================================================
(Binary files differ)
Property changes on: DART/trunk/models/am2/work/fv_rst.res.nc
___________________________________________________________________
Name: svn:mime-type
+ application/x-netcdf
Added: DART/trunk/models/am2/work/input.nml
===================================================================
--- DART/trunk/models/am2/work/input.nml (rev 0)
+++ DART/trunk/models/am2/work/input.nml 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,197 @@
+&perfect_model_obs_nml
+ start_from_restart = .true.,
+ output_restart = .true.,
+ async = 2,
+ init_time_days = 139338,
+ init_time_seconds = 0,
+ first_obs_days = -1,
+ first_obs_seconds = -1,
+ last_obs_days = -1,
+ last_obs_seconds = -1,
+ output_interval = 1,
+ restart_in_file_name = "perfect_ic_old",
+ restart_out_file_name = "perfect_ic_new"
+ obs_seq_in_file_name = "obs_seq.in",
+ obs_seq_out_file_name = "obs_seq.out",
+ adv_ens_command = "./advance_model.csh" /
+
+&filter_nml
+ async = 2,
+ adv_ens_command = "./advance_model.csh",
+ ens_size = 10,
+ start_from_restart = .true.,
+ output_restart = .true.,
+ obs_sequence_in_name = "obs_seq.out",
+ obs_sequence_out_name = "obs_seq.final",
+ restart_in_file_name = "filter_ics",
+ restart_out_file_name = "filter_ic_new",
+ init_time_days = 139338,
+ init_time_seconds = 0,
+ first_obs_days = -1,
+ first_obs_seconds = -1,
+ last_obs_days = -1,
+ last_obs_seconds = -1,
+ num_output_state_members = 0,
+ num_output_obs_members = 0,
+ output_interval = 1,
+ num_groups = 1,
+ input_qc_threshold = 4.0,
+ outlier_threshold = 3.0,
+ output_forward_op_errors = .false.,
+
+ inf_flavor = 0, 0,
+ inf_start_from_restart = .false., .false.,
+ inf_output_restart = .true., .true.,
+ inf_deterministic = .true., .true.,
+ inf_in_file_name = 'prior_inf_ic_old', 'post_inf_ic_old',
+ inf_out_file_name = 'prior_inf_ic_new', 'post_inf_ic_new',
+ inf_diag_file_name = 'prior_inf_diag', 'post_inf_diag',
+ inf_initial = 1.00, 1.00,
+ inf_lower_bound = 1.0, 1.0,
+ inf_upper_bound = 1000000.0, 1000000.0,
+ inf_sd_initial = 0.1, 0.1,
+ inf_sd_lower_bound = 0.0, 0.0
+/
+! ! inf_flavor is 0:none, 1:obs space, 2: varying state space, 3: fixed state_space
+
+&smoother_nml
+ num_lags = 0
+ start_from_restart = .false.
+ output_restart = .false.
+ restart_in_file_name = 'smoother_ics'
+ restart_out_file_name = 'smoother_restart' /
+
+&ensemble_manager_nml
+ single_restart_file_in = .false.,
+ single_restart_file_out = .false. /
+
+&assim_tools_nml
+ filter_kind = 1,
+ cutoff = 0.2,
+ sort_obs_inc = .true.,
+ spread_restoration = .false.,
+ sampling_error_correction = .false.,
+ print_every_nth_obs = 0,
+ adaptive_localization_threshold = -1/
+
+&cov_cutoff_nml
+ select_localization = 1 /
+
+&reg_factor_nml
+ select_regression = 1,
+ input_reg_file = "time_mean_reg"
+ save_reg_diagnostics = .false.,
+ reg_diagnostics_file = 'reg_diagnostics' /
+
+&obs_sequence_nml
+ write_binary_obs_sequence = .false. /
+
+&assim_model_nml
+ write_binary_restart_files = .true. /
+
+&model_nml
+ model_config_file = 'fv_rst.res.nc',
+ model_restart_file = 'fv_rst.res.nc',
+ tracer_names = 'liq_wat',
+ 'ice_wat',
+ 'cld_amt',
+ tracer_files = 'atmos_tracers.res.nc',
+ 'atmos_tracers.res.nc',
+ 'atmos_tracers.res.nc',
+ tracer_config_files = 'atmos_tracers.res.nc',
+ 'atmos_tracers.res.nc',
+ 'atmos_tracers.res.nc',
+ tracer_obs_kind_names =
+ 'CLOUD_LIQUID_WATER',
+ 'CLOUD_ICE',
+ 'CLOUD_FRACTION'
+ output_state_vector = .false.,
+ highest_obs_pressure_mb = 100.0,
+ highest_state_pressure_mb = 150.0,
+ max_obs_lat_degree = 85.0,
+ Time_step_seconds = 21600,
+ Time_step_days = 0 /
+
+&location_nml
+ horiz_dist_only = .false.,
+ vert_normalization_pressure = 100000.0,
+ vert_normalization_height = 10000.0,
+ vert_normalization_level = 20.0,
+ approximate_distance = .true.,
+ nlon = 71,
+ nlat = 36 /
+
+&preprocess_nml
+ input_obs_kind_mod_file = '../../../obs_kind/DEFAULT_obs_kind_mod.F90',
+ output_obs_kind_mod_file = '../../../obs_kind/obs_kind_mod.f90',
+ input_obs_def_mod_file = '../../../obs_def/DEFAULT_obs_def_mod.F90',
+ output_obs_def_mod_file = '../../../obs_def/obs_def_mod.f90',
+ input_files = '../../../obs_def/obs_def_reanalysis_bufr_mod.f90',
+ '../../../obs_def/obs_def_cloudfrac_mod.f90',
+ '../../../obs_def/obs_def_clw_mod.f90' /
+
+!'../../../obs_def/obs_def_gps_mod.f90',
+!'../../../obs_def/obs_def_QuikSCAT_mod.f90',
+
+
+&obs_kind_nml
+ assimilate_these_obs_types = 'RADIOSONDE_TEMPERATURE',
+ 'ACARS_TEMPERATURE',
+ 'AIRCRAFT_TEMPERATURE',
+ 'RADIOSONDE_U_WIND_COMPONENT',
+ 'RADIOSONDE_V_WIND_COMPONENT',
+ 'AIRCRAFT_U_WIND_COMPONENT',
+ 'AIRCRAFT_V_WIND_COMPONENT',
+ 'ACARS_U_WIND_COMPONENT',
+ 'ACARS_V_WIND_COMPONENT',
+ 'SAT_U_WIND_COMPONENT',
+ 'SAT_V_WIND_COMPONENT',
+ 'CLOUD_LIQUID_WATER',
+ 'CLOUD_ICE',
+ 'CLOUD_FRACTION' /
+
+! evaluate_these_obs_types = 'RADIOSONDE_SURFACE_PRESSURE' /
+
+
+&utilities_nml
+ TERMLEVEL = 1,
+ logfilename = 'dart_log.out' /
+
+&mpi_utilities_nml
+ /
+
+# The times in the namelist for the obs_diag program are vectors
+# given in the following order:
+# year month day hour minute second
+# max_num_bins can be used to specify a fixed number of bins,
+# in which case last_bin_center should be safely in the future.
+#
+# Acceptable latitudes range from [-90, 90]
+# Acceptable longitudes range from [ 0, 360]
+
+&obs_diag_nml
+ obs_sequence_name = 'obs_seq.final',
+ first_bin_center = 2003, 1, 1, 6, 0, 0 ,
+ last_bin_center = 2003, 1, 2, 0, 0, 0 ,
+ bin_separation = 0, 0, 0, 6, 0, 0 ,
+ bin_width = 0, 0, 0, 6, 0, 0 ,
+ time_to_skip = 0, 0, 1, 0, 0, 0 ,
+ max_num_bins = 1000,
+ plevel = 500,
+ hlevel = 5000,
+ obs_select = 1,
+ Nregions = 4,
+ rat_cri = 3.0,
+ input_qc_threshold = 4.0,
+ lonlim1 = 0.0, 0.0, 0.0, 235.0,
+ lonlim2 = 360.0, 360.0, 360.0, 295.0,
+ latlim1 = 20.0, -80.0, -20.0, 25.0,
+ latlim2 = 80.0, -20.0, 20.0, 55.0,
+ reg_names = 'Northern Hemisphere', 'Southern Hemisphere', 'Tropics', 'North America',
+ print_mismatched_locs = .false.,
+ verbose = .false. /
+
+&merge_obs_seq_nml
+ num_input_files = 1,
+ filename_seq = 'obs_seq_bin.out',
+ filename_out = 'obs_seq_ascii.out' /
Property changes on: DART/trunk/models/am2/work/input.nml
___________________________________________________________________
Name: svn:mime-type
+ text/text
Added: DART/trunk/models/am2/work/mkmf_column_rand
===================================================================
--- DART/trunk/models/am2/work/mkmf_column_rand (rev 0)
+++ DART/trunk/models/am2/work/mkmf_column_rand 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_column_rand $
+# $Id: mkmf_column_rand 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p column_rand -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_column_rand
Property changes on: DART/trunk/models/am2/work/mkmf_column_rand
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_create_fixed_network_seq
===================================================================
--- DART/trunk/models/am2/work/mkmf_create_fixed_network_seq (rev 0)
+++ DART/trunk/models/am2/work/mkmf_create_fixed_network_seq 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_create_fixed_network_seq $
+# $Id: mkmf_create_fixed_network_seq 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p create_fixed_network_seq -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_create_fixed_network_seq
Property changes on: DART/trunk/models/am2/work/mkmf_create_fixed_network_seq
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_create_obs_sequence
===================================================================
--- DART/trunk/models/am2/work/mkmf_create_obs_sequence (rev 0)
+++ DART/trunk/models/am2/work/mkmf_create_obs_sequence 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_create_obs_sequence $
+# $Id: mkmf_create_obs_sequence 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p create_obs_sequence -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_create_obs_sequence
Property changes on: DART/trunk/models/am2/work/mkmf_create_obs_sequence
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_filter
===================================================================
--- DART/trunk/models/am2/work/mkmf_filter (rev 0)
+++ DART/trunk/models/am2/work/mkmf_filter 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,59 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_filter $
+# $Id: mkmf_filter 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+
+# usage: mkmf_filter [ -mpi ]
+# without any args, builds filter without mpi libraries, and it will run
+# as a normal executable. if -mpi is given, it will be compiled with the mpi
+# libraries and can run with multiple cooperating processes.
+
+if ( $#argv > 0 ) then
+ if ("$argv[1]" == "-mpi") then
+ setenv usingmpi 1
+ else
+ echo "Unrecognized argument to mkmf_filter: $argv[1]"
+ echo "Usage: mkmf_filter [ -mpi ]"
+ echo " default is to generate a Makefile without MPI support."
+ exit -1
+ endif
+else
+ setenv usingmpi 0
+endif
+
+
+# make a backup copy of the path_names file, and then use
+# sed to make sure it includes either the non-mpi subroutines,
+# or the subroutines which really call mpi.
+cp -f path_names_filter path_names_filter.back
+
+if ( $usingmpi ) then
+
+ echo "Making Makefile with MPI"
+ touch using_mpi_for_filter
+ sed -e 's;/null_mpi_util;/mpi_util;' path_names_filter.back >! path_names_filter
+ setenv wrapper_arg -w
+
+else
+
+ echo "Making Makefile without MPI"
+ rm -f using_mpi_for_filter
+ sed -e 's;/mpi_util;/null_mpi_util;' path_names_filter.back >! path_names_filter
+ setenv wrapper_arg ""
+
+endif
+
+# remove temp file and now really call mkmf to generate makefile
+rm -f path_names_filter.back
+
+../../../mkmf/mkmf -p filter -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." ${wrapper_arg} path_names_filter
+
Property changes on: DART/trunk/models/am2/work/mkmf_filter
___________________________________________________________________
Name: svn:executable
+ *
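The usage note at the top of mkmf_filter (and of mkmf_wakeup_filter further down) amounts to a two-step build; a sketch, assuming the same csh workflow that workshop_setup.csh in this commit uses:

  # serial build: generates a Makefile without MPI support, then compiles
  csh mkmf_filter
  make

  # MPI-enabled build: swaps the mpi_utilities sources into the path_names file, then compiles
  csh mkmf_filter -mpi
  make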
Added: DART/trunk/models/am2/work/mkmf_merge_obs_seq
===================================================================
--- DART/trunk/models/am2/work/mkmf_merge_obs_seq (rev 0)
+++ DART/trunk/models/am2/work/mkmf_merge_obs_seq 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_merge_obs_seq $
+# $Id: mkmf_merge_obs_seq 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p merge_obs_seq -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_merge_obs_seq
Property changes on: DART/trunk/models/am2/work/mkmf_merge_obs_seq
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_obs_diag
===================================================================
--- DART/trunk/models/am2/work/mkmf_obs_diag (rev 0)
+++ DART/trunk/models/am2/work/mkmf_obs_diag 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_obs_diag $
+# $Id: mkmf_obs_diag 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p obs_diag -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_obs_diag
Property changes on: DART/trunk/models/am2/work/mkmf_obs_diag
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_perfect_model_obs
===================================================================
--- DART/trunk/models/am2/work/mkmf_perfect_model_obs (rev 0)
+++ DART/trunk/models/am2/work/mkmf_perfect_model_obs 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_perfect_model_obs $
+# $Id: mkmf_perfect_model_obs 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p perfect_model_obs -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_perfect_model_obs
Property changes on: DART/trunk/models/am2/work/mkmf_perfect_model_obs
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_preprocess
===================================================================
--- DART/trunk/models/am2/work/mkmf_preprocess (rev 0)
+++ DART/trunk/models/am2/work/mkmf_preprocess 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_preprocess $
+# $Id: mkmf_preprocess 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p preprocess -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." path_names_preprocess
Property changes on: DART/trunk/models/am2/work/mkmf_preprocess
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_date_to_dart
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_date_to_dart (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_date_to_dart 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_date_to_dart $
+# $Id: mkmf_trans_date_to_dart 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_date_to_dart -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_date_to_dart
Property changes on: DART/trunk/models/am2/work/mkmf_trans_date_to_dart
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_pv_sv
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_pv_sv (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_pv_sv 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_pv_sv $
+# $Id: mkmf_trans_pv_sv 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_pv_sv -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_pv_sv
Property changes on: DART/trunk/models/am2/work/mkmf_trans_pv_sv
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_pv_sv_pert0
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_pv_sv_pert0 (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_pv_sv_pert0 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: http://subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_pv_sv_time0 $
+# $Id: mkmf_trans_pv_sv_time0 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 12:18:09 -0600 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_pv_sv_pert0 -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_pv_sv_pert0
Property changes on: DART/trunk/models/am2/work/mkmf_trans_pv_sv_pert0
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_pv_sv_time0
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_pv_sv_time0 (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_pv_sv_time0 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_pv_sv_time0 $
+# $Id: mkmf_trans_pv_sv_time0 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_pv_sv_time0 -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_pv_sv_time0
Property changes on: DART/trunk/models/am2/work/mkmf_trans_pv_sv_time0
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_sv_pv
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_sv_pv (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_sv_pv 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_sv_pv $
+# $Id: mkmf_trans_sv_pv 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_sv_pv -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_sv_pv
Property changes on: DART/trunk/models/am2/work/mkmf_trans_sv_pv
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_trans_time
===================================================================
--- DART/trunk/models/am2/work/mkmf_trans_time (rev 0)
+++ DART/trunk/models/am2/work/mkmf_trans_time 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_trans_time $
+# $Id: mkmf_trans_time 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+#
+../../../mkmf/mkmf -p trans_time -t ../../../mkmf/mkmf.template -c "-Duse_netCDF" \
+ -a "../../.." path_names_trans_time
Property changes on: DART/trunk/models/am2/work/mkmf_trans_time
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/mkmf_wakeup_filter
===================================================================
--- DART/trunk/models/am2/work/mkmf_wakeup_filter (rev 0)
+++ DART/trunk/models/am2/work/mkmf_wakeup_filter 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,59 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL: https://proxy.subversion.ucar.edu/DAReS/DART/trunk/models/cam/work/mkmf_wakeup_filter $
+# $Id: mkmf_wakeup_filter 2691 2007-03-11 18:18:09Z thoar $
+# $Revision: 2691 $
+# $Date: 2007-03-11 14:18:09 -0400 (Sun, 11 Mar 2007) $
+
+# usage: mkmf_wakeup_filter [ -mpi ]
+# without any args, builds wakeup_filter without mpi libraries, and it will run
+# as a normal executable. if -mpi is given, it will be compiled with the mpi
+# libraries and can run with multiple cooperating processes.
+
+if ( $#argv > 0 ) then
+ if ("$argv[1]" == "-mpi") then
+ setenv usingmpi 1
+ else
+ echo "Unrecognized argument to mkmf_wakeup_filter: $argv[1]"
+ echo "Usage: mkmf_wakeup_filter [ -mpi ]"
+ echo " default is to generate a Makefile without MPI support."
+ exit -1
+ endif
+else
+ setenv usingmpi 0
+endif
+
+
+# make a backup copy of the path_names file, and then use
+# sed to make sure it includes either the non-mpi subroutines,
+# or the subroutines which really call mpi.
+cp -f path_names_wakeup_filter path_names_wakeup_filter.back
+
+if ( $usingmpi ) then
+
+ echo "Making Makefile with MPI"
+ touch using_mpi_for_wakeup_filter
+ sed -e 's;/null_mpi_util;/mpi_util;' path_names_wakeup_filter.back >! path_names_wakeup_filter
+ setenv wrapper_arg -w
+
+else
+
+ echo "Making Makefile without MPI"
+ rm -f using_mpi_for_wakeup_filter
+ sed -e 's;/mpi_util;/null_mpi_util;' path_names_wakeup_filter.back >! path_names_wakeup_filter
+ setenv wrapper_arg ""
+
+endif
+
+# remove temp file and now really call mkmf to generate makefile
+rm -f path_names_wakeup_filter.back
+
+../../../mkmf/mkmf -p wakeup_filter -t ../../../mkmf/mkmf.template -c"-Duse_netCDF" \
+ -a "../../.." ${wrapper_arg} path_names_wakeup_filter
+
Property changes on: DART/trunk/models/am2/work/mkmf_wakeup_filter
___________________________________________________________________
Name: svn:executable
+ *
Added: DART/trunk/models/am2/work/obs_seq.final
===================================================================
--- DART/trunk/models/am2/work/obs_seq.final (rev 0)
+++ DART/trunk/models/am2/work/obs_seq.final 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,58 @@
+ obs_sequence
+obs_kind_definitions
+ 27
+ 1 RADIOSONDE_U_WIND_COMPONENT
+ 2 RADIOSONDE_V_WIND_COMPONENT
+ 3 RADIOSONDE_SURFACE_PRESSURE
+ 4 RADIOSONDE_TEMPERATURE
+ 5 RADIOSONDE_SPECIFIC_HUMIDITY
+ 6 AIRCRAFT_U_WIND_COMPONENT
+ 7 AIRCRAFT_V_WIND_COMPONENT
+ 8 AIRCRAFT_TEMPERATURE
+ 9 ACARS_U_WIND_COMPONENT
+ 10 ACARS_V_WIND_COMPONENT
+ 11 ACARS_TEMPERATURE
+ 12 MARINE_SFC_U_WIND_COMPONENT
+ 13 MARINE_SFC_V_WIND_COMPONENT
+ 14 MARINE_SFC_TEMPERATURE
+ 15 MARINE_SFC_SPECIFIC_HUMIDITY
+ 16 LAND_SFC_U_WIND_COMPONENT
+ 17 LAND_SFC_V_WIND_COMPONENT
+ 18 LAND_SFC_TEMPERATURE
+ 19 LAND_SFC_SPECIFIC_HUMIDITY
+ 20 SAT_U_WIND_COMPONENT
+ 21 SAT_V_WIND_COMPONENT
+ 22 ATOV_TEMPERATURE
+ 23 AIRS_TEMPERATURE
+ 24 AIRS_SPECIFIC_HUMIDITY
+ 25 CLOUD_FRACTION
+ 26 CLOUD_LIQUID_WATER
+ 27 CLOUD_ICE
+ num_copies: 6 num_qc: 2
+ num_obs: 1 max_num_obs: 1
+observations
+truth
+prior ensemble mean
+posterior ensemble mean
+prior ensemble spread
+posterior ensemble spread
+qc
+DART quality control
+ first: 1 last: 1
+ OBS 1
+ 286.317960048181
+ 286.354248046875
+ 284.875158691406
+ 286.317181401990
+ 1.36086789971284
+ 3.161424241919221E-002
+ 0.000000000000000E+000
+ 0.000000000000000E+000
+ -1 -1 -1
+obdef
+loc3d
+ 3.03250957534015 -0.54713129922808 23.00000000000000 1
+kind
+ -736941
+ 0 139338
+ 1.000000000000000E-003
Added: DART/trunk/models/am2/work/obs_seq.in
===================================================================
--- DART/trunk/models/am2/work/obs_seq.in (rev 0)
+++ DART/trunk/models/am2/work/obs_seq.in 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,144 @@
+ obs_sequence
+obs_kind_definitions
+ 27
+ 1 RADIOSONDE_U_WIND_COMPONENT
+ 2 RADIOSONDE_V_WIND_COMPONENT
+ 3 RADIOSONDE_SURFACE_PRESSURE
+ 4 RADIOSONDE_TEMPERATURE
+ 5 RADIOSONDE_SPECIFIC_HUMIDITY
+ 6 AIRCRAFT_U_WIND_COMPONENT
+ 7 AIRCRAFT_V_WIND_COMPONENT
+ 8 AIRCRAFT_TEMPERATURE
+ 9 ACARS_U_WIND_COMPONENT
+ 10 ACARS_V_WIND_COMPONENT
+ 11 ACARS_TEMPERATURE
+ 12 MARINE_SFC_U_WIND_COMPONENT
+ 13 MARINE_SFC_V_WIND_COMPONENT
+ 14 MARINE_SFC_TEMPERATURE
+ 15 MARINE_SFC_SPECIFIC_HUMIDITY
+ 16 LAND_SFC_U_WIND_COMPONENT
+ 17 LAND_SFC_V_WIND_COMPONENT
+ 18 LAND_SFC_TEMPERATURE
+ 19 LAND_SFC_SPECIFIC_HUMIDITY
+ 20 SAT_U_WIND_COMPONENT
+ 21 SAT_V_WIND_COMPONENT
+ 22 ATOV_TEMPERATURE
+ 23 AIRS_TEMPERATURE
+ 24 AIRS_SPECIFIC_HUMIDITY
+ 25 CLOUD_FRACTION
+ 26 CLOUD_LIQUID_WATER
+ 27 CLOUD_ICE
+ num_copies: 0 num_qc: 1
+ num_obs: 11 max_num_obs: 11
+qc
+ first: 1 last: 11
+ OBS 1
+ 0.000000000000000E+000
+ -1 2 -1
+obdef
+loc3d
+ 3.03250957534015 -0.54713129922808 20.00000000000000 1
+kind
+ -736940
+ 0 139338
+ 1.000000000000000E-003
+ OBS 2
+ 0.000000000000000E+000
+ 1 3 -1
+obdef
+loc3d
+ 3.03250957534015 -0.54713129922808 21.00000000000000 1
+kind
+ -736941
+ 0 139338
+ 1.000000000000000E-003
+ OBS 3
+ 0.000000000000000E+000
+ 2 4 -1
+obdef
+loc3d
+ 3.03250957534015 -0.58243006802799 20.00000000000000 1
+kind
+ -733484
+ 0 139338
+ 1.000000000000000E-003
+ OBS 4
+ 0.000000000000000E+000
+ 3 5 -1
+obdef
+loc3d
+ 3.03250957534015 -0.58243006802799 21.00000000000000 1
+kind
+ -733485
+ 0 139338
+ 1.000000000000000E-003
+ OBS 5
+ 0.000000000000000E+000
+ 4 6 -1
+obdef
+loc3d
+ 3.07614280664001 -0.58243006802799 20.00000000000000 1
+kind
+ -733508
+ 0 139338
+ 1.000000000000000E-003
+ OBS 6
+ 0.000000000000000E+000
+ 5 7 -1
+obdef
+loc3d
+ 3.07614280664001 -0.58243006802799 21.00000000000000 1
+kind
+ -733509
+ 0 139338
+ 1.000000000000000E-003
+ OBS 7
+ 0.000000000000000E+000
+ 6 8 -1
+obdef
+loc3d
+ 3.07614280664001 -0.54713129922808 20.00000000000000 1
+kind
+ -736964
+ 0 139338
+ 1.000000000000000E-003
+ OBS 8
+ 0.000000000000000E+000
+ 7 9 -1
+obdef
+loc3d
+ 3.07614280664001 -0.54713129922808 21.00000000000000 1
+kind
+ -736965
+ 0 139338
+ 1.000000000000000E-003
+ OBS 9
+ 0.000000000000000E+000
+ 8 10 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 94680.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
+ OBS 10
+ 0.000000000000000E+000
+ 9 11 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 95500.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
+ OBS 11
+ 0.000000000000000E+000
+ 10 -1 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 96310.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
Added: DART/trunk/models/am2/work/obs_seq.out
===================================================================
--- DART/trunk/models/am2/work/obs_seq.out (rev 0)
+++ DART/trunk/models/am2/work/obs_seq.out 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,168 @@
+ obs_sequence
+obs_kind_definitions
+ 27
+ 1 RADIOSONDE_U_WIND_COMPONENT
+ 2 RADIOSONDE_V_WIND_COMPONENT
+ 3 RADIOSONDE_SURFACE_PRESSURE
+ 4 RADIOSONDE_TEMPERATURE
+ 5 RADIOSONDE_SPECIFIC_HUMIDITY
+ 6 AIRCRAFT_U_WIND_COMPONENT
+ 7 AIRCRAFT_V_WIND_COMPONENT
+ 8 AIRCRAFT_TEMPERATURE
+ 9 ACARS_U_WIND_COMPONENT
+ 10 ACARS_V_WIND_COMPONENT
+ 11 ACARS_TEMPERATURE
+ 12 MARINE_SFC_U_WIND_COMPONENT
+ 13 MARINE_SFC_V_WIND_COMPONENT
+ 14 MARINE_SFC_TEMPERATURE
+ 15 MARINE_SFC_SPECIFIC_HUMIDITY
+ 16 LAND_SFC_U_WIND_COMPONENT
+ 17 LAND_SFC_V_WIND_COMPONENT
+ 18 LAND_SFC_TEMPERATURE
+ 19 LAND_SFC_SPECIFIC_HUMIDITY
+ 20 SAT_U_WIND_COMPONENT
+ 21 SAT_V_WIND_COMPONENT
+ 22 ATOV_TEMPERATURE
+ 23 AIRS_TEMPERATURE
+ 24 AIRS_SPECIFIC_HUMIDITY
+ 25 CLOUD_FRACTION
+ 26 CLOUD_LIQUID_WATER
+ 27 CLOUD_ICE
+ num_copies: 2 num_qc: 1
+ num_obs: 11 max_num_obs: 11
+observations
+truth
+qc
+ first: 1 last: 11
+ OBS 1
+ 284.893460536463
+ 284.929748535156
+ 0.000000000000000E+000
+ -1 2 -1
+obdef
+loc3d
+ 3.03250957534015 -0.54713129922808 20.00000000000000 1
+kind
+ -736940
+ 0 139338
+ 1.000000000000000E-003
+ OBS 2
+ 286.342114496622
+ 286.354248046875
+ 0.000000000000000E+000
+ 1 3 -1
+obdef
+loc3d
+ 3.03250957534015 -0.54713129922808 21.00000000000000 1
+kind
+ -736941
+ 0 139338
+ 1.000000000000000E-003
+ OBS 3
+ 283.984917184698
+ 283.956329345703
+ 0.000000000000000E+000
+ 2 4 -1
+obdef
+loc3d
+ 3.03250957534015 -0.58243006802799 20.00000000000000 1
+kind
+ -733484
+ 0 139338
+ 1.000000000000000E-003
+ OBS 4
+ 285.389911176116
+ 285.378875732422
+ 0.000000000000000E+000
+ 3 5 -1
+obdef
+loc3d
+ 3.03250957534015 -0.58243006802799 21.00000000000000 1
+kind
+ -733485
+ 0 139338
+ 1.000000000000000E-003
+ OBS 5
+ 283.367990601235
+ 283.351379394531
+ 0.000000000000000E+000
+ 4 6 -1
+obdef
+loc3d
+ 3.07614280664001 -0.58243006802799 20.00000000000000 1
+kind
+ -733508
+ 0 139338
+ 1.000000000000000E-003
+ OBS 6
+ 284.744586867223
+ 284.772888183594
+ 0.000000000000000E+000
+ 5 7 -1
+obdef
+loc3d
+ 3.07614280664001 -0.58243006802799 21.00000000000000 1
+kind
+ -733509
+ 0 139338
+ 1.000000000000000E-003
+ OBS 7
+ 284.437324607699
+ 284.461914062500
+ 0.000000000000000E+000
+ 6 8 -1
+obdef
+loc3d
+ 3.07614280664001 -0.54713129922808 20.00000000000000 1
+kind
+ -736964
+ 0 139338
+ 1.000000000000000E-003
+ OBS 8
+ 285.861127505778
+ 285.884918212891
+ 0.000000000000000E+000
+ 7 9 -1
+obdef
+loc3d
+ 3.07614280664001 -0.54713129922808 21.00000000000000 1
+kind
+ -736965
+ 0 139338
+ 1.000000000000000E-003
+ OBS 9
+ 282.339375281228
+ 282.337410609998
+ 0.000000000000000E+000
+ 8 10 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 94680.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
+ OBS 10
+ 283.039130123138
+ 283.033794289783
+ 0.000000000000000E+000
+ 9 11 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 95500.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
+ OBS 11
+ 283.759380580649
+ 283.723668208853
+ 0.000000000000000E+000
+ 10 -1 -1
+obdef
+loc3d
+ 3.05432619099008 -0.56478854594536 96310.00000000000000 2
+kind
+ 4
+ 0 139338
+ 1.000000000000000E-003
Added: DART/trunk/models/am2/work/path_names_column_rand
===================================================================
--- DART/trunk/models/am2/work/path_names_column_rand (rev 0)
+++ DART/trunk/models/am2/work/path_names_column_rand 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,7 @@
+models/am2/column_rand.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+common/types_mod.f90
+utilities/utilities_mod.f90
+time_manager/time_manager_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_create_fixed_network_seq
===================================================================
--- DART/trunk/models/am2/work/path_names_create_fixed_network_seq (rev 0)
+++ DART/trunk/models/am2/work/path_names_create_fixed_network_seq 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,14 @@
+obs_sequence/create_fixed_network_seq.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+assim_model/assim_model_mod.f90
+models/am2/model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_create_obs_sequence
===================================================================
--- DART/trunk/models/am2/work/path_names_create_obs_sequence (rev 0)
+++ DART/trunk/models/am2/work/path_names_create_obs_sequence 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,16 @@
+obs_sequence/create_obs_sequence.f90
+ensemble_manager/ensemble_manager_mod.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_model/obs_model_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+assim_model/assim_model_mod.f90
+models/am2/model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_filter
===================================================================
--- DART/trunk/models/am2/work/path_names_filter (rev 0)
+++ DART/trunk/models/am2/work/path_names_filter 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,22 @@
+filter/filter.f90
+smoother/smoother_mod.f90
+ensemble_manager/ensemble_manager_mod.f90
+assim_tools/assim_tools_mod.f90
+adaptive_inflate/adaptive_inflate_mod.f90
+sort/sort_mod.f90
+cov_cutoff/cov_cutoff_mod.f90
+reg_factor/reg_factor_mod.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_model/obs_model_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+assim_model/assim_model_mod.f90
+models/am2/model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_merge_obs_seq
===================================================================
--- DART/trunk/models/am2/work/path_names_merge_obs_seq (rev 0)
+++ DART/trunk/models/am2/work/path_names_merge_obs_seq 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,15 @@
+obs_sequence/merge_obs_seq.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+cov_cutoff/cov_cutoff_mod.f90
+assim_model/assim_model_mod.f90
+models/am2/model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_obs_diag
===================================================================
--- DART/trunk/models/am2/work/path_names_obs_diag (rev 0)
+++ DART/trunk/models/am2/work/path_names_obs_diag 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,17 @@
+diagnostics/threed_sphere/obs_diag.f90
+sort/sort_mod.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+obs_def/obs_def_radar_mod.f90
+obs_def/obs_def_dew_point_mod.f90
+assim_model/assim_model_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+models/am2/model_mod.f90
+location/threed_sphere/location_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_perfect_model_obs
===================================================================
--- DART/trunk/models/am2/work/path_names_perfect_model_obs (rev 0)
+++ DART/trunk/models/am2/work/path_names_perfect_model_obs 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,21 @@
+perfect_model_obs/perfect_model_obs.f90
+ensemble_manager/ensemble_manager_mod.f90
+assim_tools/assim_tools_mod.f90
+adaptive_inflate/adaptive_inflate_mod.f90
+sort/sort_mod.f90
+cov_cutoff/cov_cutoff_mod.f90
+reg_factor/reg_factor_mod.f90
+obs_sequence/obs_sequence_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_model/obs_model_mod.f90
+obs_def/obs_def_mod.f90
+obs_def/obs_def_gps_mod.f90
+assim_model/assim_model_mod.f90
+models/am2/model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_preprocess
===================================================================
--- DART/trunk/models/am2/work/path_names_preprocess (rev 0)
+++ DART/trunk/models/am2/work/path_names_preprocess 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,3 @@
+preprocess/preprocess.f90
+common/types_mod.f90
+utilities/utilities_mod.f90
Added: DART/trunk/models/am2/work/path_names_trans_date_to_dart
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_date_to_dart (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_date_to_dart 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,5 @@
+models/am2/trans_date_to_dart.f90
+time_manager/time_manager_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+utilities/utilities_mod.f90
+common/types_mod.f90
Added: DART/trunk/models/am2/work/path_names_trans_pv_sv
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_pv_sv (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_pv_sv 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,11 @@
+obs_kind/obs_kind_mod.f90
+models/am2/trans_pv_sv.f90
+models/am2/model_mod.f90
+assim_model/assim_model_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+time_manager/time_manager_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
Added: DART/trunk/models/am2/work/path_names_trans_pv_sv_pert0
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_pv_sv_pert0 (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_pv_sv_pert0 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,11 @@
+models/am2/trans_pv_sv_pert0.f90
+models/am2/model_mod.f90
+assim_model/assim_model_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+time_manager/time_manager_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+obs_kind/obs_kind_mod.f90
\ No newline at end of file
Added: DART/trunk/models/am2/work/path_names_trans_pv_sv_time0
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_pv_sv_time0 (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_pv_sv_time0 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,11 @@
+models/am2/trans_pv_sv_time0.f90
+models/am2/model_mod.f90
+assim_model/assim_model_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+time_manager/time_manager_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+obs_kind/obs_kind_mod.f90
\ No newline at end of file
Added: DART/trunk/models/am2/work/path_names_trans_sv_pv
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_sv_pv (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_sv_pv 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,11 @@
+obs_kind/obs_kind_mod.f90
+models/am2/trans_sv_pv.f90
+models/am2/model_mod.f90
+assim_model/assim_model_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+time_manager/time_manager_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
Added: DART/trunk/models/am2/work/path_names_trans_time
===================================================================
--- DART/trunk/models/am2/work/path_names_trans_time (rev 0)
+++ DART/trunk/models/am2/work/path_names_trans_time 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,11 @@
+obs_kind/obs_kind_mod.f90
+models/am2/trans_time.f90
+time_manager/time_manager_mod.f90
+assim_model/assim_model_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+random_seq/random_seq_mod.f90
+random_nr/random_nr_mod.f90
+models/am2/model_mod.f90
Added: DART/trunk/models/am2/work/path_names_wakeup_filter
===================================================================
--- DART/trunk/models/am2/work/path_names_wakeup_filter (rev 0)
+++ DART/trunk/models/am2/work/path_names_wakeup_filter 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,5 @@
+filter/wakeup_filter.f90
+common/types_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/models/am2/work/workshop_setup.csh
===================================================================
--- DART/trunk/models/am2/work/workshop_setup.csh (rev 0)
+++ DART/trunk/models/am2/work/workshop_setup.csh 2008-04-04 22:14:26 UTC (rev 3291)
@@ -0,0 +1,134 @@
+#!/bin/csh
+#
+# Data Assimilation Research Testbed -- DART
+# Copyright 2004-2007, Data Assimilation Research Section
+# University Corporation for Atmospheric Research
+# Licensed under the GPL -- www.gpl.org/licenses/gpl.html
+#
+# <next few lines under version control, do not edit>
+# $URL$
+# $Id: workshop_setup.csh 2864 2007-04-12 17:24:47Z nancy $
+# $Revision$
+# $Date: 2007-04-12 13:24:47 -0400 (Thu, 12 Apr 2007) $
+
+# Script to manage the compilation of all components for this model;
+# executes a known "perfect model" experiment using an existing
+# observation sequence file (obs_seq.in) and initial conditions appropriate
+# for both 'perfect_model_obs' (perfect_ics) and 'filter' (filter_ics).
+# There are enough initial conditions for 80 ensemble members in filter.
+# Use ens_size = 81 and it WILL bomb. Guaranteed.
+# The 'input.nml' file controls all facets of this execution.
+#
+# 'create_obs_sequence' and 'create_fixed_network_sequence' were used to
+# create the observation sequence file 'obs_seq.in' - this defines
+# what/where/when we want observations. This script does not run these
+# programs - intentionally.
+#
+# 'perfect_model_obs' results in a True_State.nc file that contains
+# the true state, and obs_seq.out - a file that contains the "observations"
+# that will be assimilated by 'filter'.
+#
+# 'filter' results in three files (at least): Prior_Diag.nc - the state
+# of all ensemble members prior to the assimilation (i.e. the forecast),
+# Posterior_Diag.nc - the state of all ensemble members after the
+# assimilation (i.e. the analysis), and obs_seq.final - the ensemble
+# members' estimate of what the observations should have been.
+#
+# Once 'perfect_model_obs' has advanced the model and harvested the
+# observations for the assimilation experiment, 'filter' may be run
+# over and over by simply changing the namelist parameters in input.nml.
+#
+# The result of each assimilation can be explored in model-space with
+# matlab scripts that directly read the netCDF output, or in observation-space.
+# 'obs_diag' is a program that will create observation-space diagnostics
+# for any result of 'filter' and results in a couple data files that can
+# be explored with yet more matlab scripts.
+#
+#----------------------------------------------------------------------
+# 'preprocess' is a program that culls the appropriate sections of the
+# observation module for the observation types in 'input.nml'; the
+# resulting source file is used by all the remaining programs,
+# so this MUST be run first.
+#----------------------------------------------------------------------
+
+\rm -f preprocess *.o *.mod
+\rm -f ../../../obs_def/obs_def_mod.f90
+\rm -f ../../../obs_kind/obs_kind_mod.f90
+
+set MODEL = "am2"
+
+@ n = 1
+
+echo
+echo
+echo "---------------------------------------------------------------"
+echo "${MODEL} build number ${n} is preprocess"
+
+csh mkmf_preprocess
+make || exit $n
+
+./preprocess || exit 99
+
+#----------------------------------------------------------------------
+# Build all the single-threaded targets
+#----------------------------------------------------------------------
+
+foreach TARGET ( mkmf_* )
+
+ set PROG = `echo $TARGET | sed -e 's#mkmf_##'`
+
+ switch ( $TARGET )
+ case mkmf_preprocess:
+ breaksw
+ default:
+ @ n = $n + 1
+ echo
+ echo "---------------------------------------------------"
+ echo "${MODEL} build number ${n} is ${PROG}"
+ \rm -f ${PROG}
+ csh $TARGET || exit $n
+ make || exit $n
+ breaksw
+ endsw
+end
+
+#
+# Since we're not building the MPI targets...
+#
+exit 0
+#----------------------------------------------------------------------
+# Build the MPI-enabled target(s)
+#----------------------------------------------------------------------
+
+\rm -f *.o *.mod filter wakeup_filter
+
+echo
+echo "---------------------------------------------------"
+echo "build number $n is mkmf_filter"
+csh mkmf_filter -mpi
+make
+
+if ($status != 0) then
+ echo
+ echo "If this died in mpi_utilities_mod, see code comment"
+ echo "in mpi_utilities_mod.f90 starting with 'BUILD TIP' "
+ echo
+ exit $n
+endif
+@ n = $n + 1
+
+echo
+echo "---------------------------------------------------"
+echo "build number $n is mkmf_wakeup_filter"
+csh mkmf_wakeup_filter -mpi
+make || exit $n
+
+\rm -f *.o *.mod
+
+echo
+echo 'time to run filter here:'
+echo ' for lsf run "bsub < runme_filter"'
+echo ' for pbs run "qsub runme_filter"'
+echo ' for lam-mpi run "lamboot" once, then "runme_filter"'
+echo ' for mpich run "mpd" once, then "runme_filter"'
+
Property changes on: DART/trunk/models/am2/work/workshop_setup.csh
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ "Date Rev Author URL Id"
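Once workshop_setup.csh has built the single-threaded targets, the experiment described in its header comments runs from the work directory roughly as follows (a sketch based on those comments; the script itself only builds the executables, apart from preprocess, which it runs directly):

  ./perfect_model_obs   # advances the model; writes True_State.nc and obs_seq.out
  ./filter              # assimilates; writes Prior_Diag.nc, Posterior_Diag.nc, obs_seq.final
  ./obs_diag            # observation-space diagnostics from obs_seq.final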