[Dart-dev] [7143] DART/trunk/observations/Ameriflux: More robust way of matching the columns needed.
nancy at ucar.edu
nancy at ucar.edu
Tue Aug 26 12:29:46 MDT 2014
Revision: 7143
Author: thoar
Date: 2014-08-26 12:29:45 -0600 (Tue, 26 Aug 2014)
Log Message:
-----------
More robust way of matching the columns needed. Removed the hardcoded requirement of
exactly 34 columns of data values. Can be more, can be less ... as long as the
mandatory columns are present.
They moved the data sources ... again.
Updated the html to reflect where I found them this time.
Added support for the obs_seq_to_netcdf, just for fun.
Modified Paths:
--------------
DART/trunk/observations/Ameriflux/level4_to_obs.f90
DART/trunk/observations/Ameriflux/level4_to_obs.html
DART/trunk/observations/Ameriflux/work/input.nml
DART/trunk/observations/Ameriflux/work/path_names_level4_to_obs
DART/trunk/observations/Ameriflux/work/path_names_obs_sequence_tool
DART/trunk/observations/Ameriflux/work/path_names_preprocess
Added Paths:
-----------
DART/trunk/observations/Ameriflux/work/mkmf_obs_seq_to_netcdf
DART/trunk/observations/Ameriflux/work/path_names_obs_seq_to_netcdf
-------------- next part --------------
Modified: DART/trunk/observations/Ameriflux/level4_to_obs.f90
===================================================================
--- DART/trunk/observations/Ameriflux/level4_to_obs.f90 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/level4_to_obs.f90 2014-08-26 18:29:45 UTC (rev 7143)
@@ -22,7 +22,7 @@
register_module, error_handler, E_MSG, E_ERR, &
open_file, close_file, do_nml_file, do_nml_term, &
check_namelist_read, find_namelist_in_file, &
- nmlfileunit
+ nmlfileunit, logfileunit
use time_manager_mod, only : time_type, set_calendar_type, GREGORIAN, &
set_date, set_time, get_time, print_time, &
@@ -52,8 +52,8 @@
! Namelist with default values
!-----------------------------------------------------------------------
-character(len=128) :: text_input_file = 'textdata.input'
-character(len=128) :: obs_out_file = 'obs_seq.out'
+character(len=256) :: text_input_file = 'textdata.input'
+character(len=256) :: obs_out_file = 'obs_seq.out'
integer :: year = -1
real(r8) :: timezoneoffset = -1.0_r8
real(r8) :: latitude = -1.0_r8
@@ -72,8 +72,8 @@
!-----------------------------------------------------------------------
character(len=300) :: input_line, bigline
-character(len=256) :: string1, string2, string3
-integer :: iline, nlines
+character(len=512) :: string1, string2, string3
+integer :: iline, nlines, nwords
logical :: first_obs
integer :: oday, osec, rcio, iunit
integer :: num_copies, num_qc, max_obs
@@ -85,16 +85,16 @@
type towerdata
type(time_type) :: time_obs
- character(len=20) :: monthstring = 'month'
- character(len=20) :: daystring = 'day'
- character(len=20) :: hourstring = 'hour'
- character(len=20) :: doystring = 'doy'
- character(len=20) :: neestring = 'nee_or_fmds'
- character(len=20) :: neeQCstring = 'nee_or_fmdsqc'
- character(len=20) :: lestring = 'le_f'
- character(len=20) :: leQCstring = 'le_fqc'
- character(len=20) :: hstring = 'h_f'
- character(len=20) :: hQCstring = 'h_fqc'
+ character(len=20) :: monthstring = 'Month'
+ character(len=20) :: daystring = 'Day'
+ character(len=20) :: hourstring = 'Hour'
+ character(len=20) :: doystring = 'DoY'
+ character(len=20) :: neestring = 'NEE_or_fMDS'
+ character(len=20) :: neeQCstring = 'NEE_or_fMDSqc'
+ character(len=20) :: lestring = 'LE_f'
+ character(len=20) :: leQCstring = 'LE_fqc'
+ character(len=20) :: hstring = 'H_f'
+ character(len=20) :: hQCstring = 'H_fqc'
integer :: monthindex
integer :: dayindex
integer :: hourindex
@@ -142,9 +142,11 @@
offset = set_time(nint(abs(timezoneoffset)*3600.0_r8),0)
prev_time = set_time(0, 0)
-if (verbose) print *, 'tower located at lat, lon, elev =', latitude, longitude, elevation
-if (verbose) print *, 'flux observations taken at =', flux_height,'m'
+write(string1, *) 'tower located at lat, lon, elev =', latitude, longitude, elevation
+write(string2, *) 'flux observations taken at =', flux_height,'m'
+if (verbose) call error_handler(E_MSG,'level4_to_obs',string1,text2=string2)
+
! check the lat/lon values to see if they are ok
if (longitude < 0.0_r8) longitude = longitude + 360.0_r8
@@ -170,7 +172,7 @@
! in observation sequence - the other is for the new observation.
iunit = open_file(text_input_file, 'formatted', 'read')
-if (verbose) print *, 'opened input file ' // trim(text_input_file)
+if (verbose) call error_handler(E_MSG,'level4_to_obs','opened input file '//trim(text_input_file))
nlines = count_file_lines(iunit)
max_obs = 3*nlines
@@ -191,7 +193,7 @@
! The first line describes all the fields ... column headers, if you will
rewind(iunit)
-call decode_header(iunit)
+call decode_header(iunit, nwords)
obsloop: do iline = 2,nlines
@@ -207,11 +209,13 @@
input_line = adjustl(bigline)
! parse the line into the tower structure (including the observation time)
- call stringparse(input_line, iline)
+ call stringparse(input_line, nwords, iline)
if (iline <= 2) then
write(*,*)''
- write(*,*)'Check of the first observation: (column,string,value)'
+ call print_date(tower%time_obs, ' first observation date (local time) is')
+ call print_time(tower%time_obs, ' first observation time (local time) is')
+ write(*,*)'first observation raw values: (column,string,value) timezone not applied'
write(*,*)tower%monthindex, tower%monthstring , tower%month
write(*,*)tower%dayindex , tower%daystring , tower%day
write(*,*)tower%hourindex , tower%hourstring , tower%hour
@@ -222,14 +226,33 @@
write(*,*)tower%leQCindex , tower%leQCstring , tower%leQC
write(*,*)tower%neeindex , tower%neestring , tower%nee
write(*,*)tower%neeQCindex, tower%neeQCstring , tower%neeQC
- call print_date(tower%time_obs, 'observation date is')
- call print_time(tower%time_obs, 'observation time is')
+ write(*,*)''
+
+ write(logfileunit,*)''
+ call print_date(tower%time_obs, ' first observation date (local time) is',logfileunit)
+ call print_time(tower%time_obs, ' first observation time (local time) is',logfileunit)
+ write(logfileunit,*)'first observation raw values: (column,string,value) timezone not applied'
+ write(logfileunit,*)tower%monthindex, tower%monthstring , tower%month
+ write(logfileunit,*)tower%dayindex , tower%daystring , tower%day
+ write(logfileunit,*)tower%hourindex , tower%hourstring , tower%hour
+ write(logfileunit,*)tower%doyindex , tower%doystring , tower%doy
+ write(logfileunit,*)tower%hindex , tower%hstring , tower%h
+ write(logfileunit,*)tower%hQCindex , tower%hQCstring , tower%hQC
+ write(logfileunit,*)tower%leindex , tower%lestring , tower%le
+ write(logfileunit,*)tower%leQCindex , tower%leQCstring , tower%leQC
+ write(logfileunit,*)tower%neeindex , tower%neestring , tower%nee
+ write(logfileunit,*)tower%neeQCindex, tower%neeQCstring , tower%neeQC
+ write(logfileunit,*)''
end if
- if (verbose) call print_date(tower%time_obs, 'obs time is')
-
call get_time(tower%time_obs, osec, oday)
+ if (verbose) then
+ write(string1,*)'obs time is (seconds,days) ',osec, oday,' obs date is '
+ call print_date(tower%time_obs, trim(string1))
+ call print_date(tower%time_obs, trim(string1),logfileunit)
+ endif
+
! make an obs derived type, and then add it to the sequence
! If the QC value is good, use the observation.
! Increasingly larger QC values are more questionable quality data.
@@ -269,7 +292,8 @@
! if we added any obs to the sequence, write it out to a file now.
if ( get_num_obs(obs_seq) > 0 ) then
- if (verbose) print *, 'writing obs_seq, obs_count = ', get_num_obs(obs_seq)
+ write(string1,*)'writing obs_seq, obs_count = ', get_num_obs(obs_seq)
+ if (verbose) call error_handler(E_MSG,'level4_to_obs',string1)
call write_obs_seq(obs_seq, obs_out_file)
endif
@@ -435,15 +459,16 @@
-subroutine decode_header(iunit)
+subroutine decode_header(iunit,ncolumns)
! Reads the first line of the header and parses the information.
! And by parse, I mean determine which columns are the columns
! of interest.
integer, intent(in) :: iunit
+integer, intent(out) :: ncolumns
integer, parameter :: maxwordlength = 30
-integer :: i,charcount,columncount,wordlength,maxlength
+integer :: i,charcount,columncount,wordlength
character(len=maxwordlength), dimension(:), allocatable :: columns
integer, dimension(10) :: qc = 0
@@ -460,8 +485,8 @@
! Count how many commas are in the line - use this to determine how many columns
charcount = CountChar(input_line,',')
-columncount = charcount + 1
-allocate(columns(columncount))
+ncolumns = charcount + 1
+allocate(columns(ncolumns))
columncount = 0 ! track the number of columns
wordlength = 0 ! number of characters in the column descriptor
@@ -475,7 +500,8 @@
call error_handler(E_ERR,'decode_header',string1, source, revision, revdate)
endif
columns(columncount) = input_line((i-wordlength):(i-1))
- if (verbose) write(*,*)'word(',columncount,') is ',columns(columncount)
+ write(string1,*) 'word(',columncount,') is ',columns(columncount)
+ if (verbose) call error_handler(E_MSG,'decode_header',string1)
wordlength = 0
charcount = i
else
@@ -485,33 +511,45 @@
! There is one more column after the last comma
-columns(columncount+1) = input_line((charcount+1):len_trim(input_line))
+if ((columncount+1) /= ncolumns) then
+ write(string1,*)'parsed wrong number of words ...'
+ write(string2,*)'expected ',ncolumns,' got ',columncount+1
+ call error_handler(E_ERR,'decode_header',string1,source,revision,revdate, &
+ text2=trim(string2), text3=trim(input_line))
+endif
+columns(ncolumns) = input_line((charcount+1):len_trim(input_line))
+
+write(string1,*)'word(',ncolumns,') is ',columns(ncolumns)
+if (verbose) call error_handler(E_MSG,'decode_header',string1)
+
! Finally get to the task at hand
-tower%monthindex = Match(columns,'Month') ! used to be 1
-tower%dayindex = Match(columns,'Day') ! used to be 2
-tower%hourindex = Match(columns,'Hour') ! used to be 3
-tower%doyindex = Match(columns,'DoY') ! used to be 4
-tower%hindex = Match(columns,'H_f') ! used to be 15
-tower%hQCindex = Match(columns,'H_fqc') ! used to be 16
-tower%leindex = Match(columns,'LE_f') ! used to be 17
-tower%leQCindex = Match(columns,'LE_fqc') ! used to be 18
-tower%neeindex = Match(columns,'NEE_or_fMDS') ! used to be 26
-tower%neeQCindex = Match(columns,'NEE_or_fMDSqc') ! used to be 27
+tower%monthindex = Match(columns, tower%monthstring)
+tower%dayindex = Match(columns, tower%daystring)
+tower%hourindex = Match(columns, tower%hourstring)
+tower%doyindex = Match(columns, tower%doystring)
+tower%hindex = Match(columns, tower%hstring)
+tower%hQCindex = Match(columns, tower%hQCstring)
+tower%leindex = Match(columns, tower%lestring)
+tower%leQCindex = Match(columns, tower%leQCstring)
+tower%neeindex = Match(columns, tower%neestring)
+tower%neeQCindex = Match(columns, tower%neeQCstring)
+! FIXME ... find a column marked 'year' or 'error_var' and use if possible.
+
! Check to make sure we got all the indices we need
-qc( 1) = CheckIndex( tower%monthindex , 'Month' )
-qc( 2) = CheckIndex( tower%dayindex , 'Day' )
-qc( 3) = CheckIndex( tower%hourindex , 'Hour' )
-qc( 4) = CheckIndex( tower%doyindex , 'DoY' )
-qc( 5) = CheckIndex( tower%hindex , 'H_f' )
-qc( 6) = CheckIndex( tower%hQCindex , 'H_fqc' )
-qc( 7) = CheckIndex( tower%leindex , 'LE_f' )
-qc( 8) = CheckIndex( tower%leQCindex , 'LE_fqc' )
-qc( 9) = CheckIndex( tower%neeindex , 'NEE_or_fMDS' )
-qc(10) = CheckIndex( tower%neeQCindex , 'NEE_or_fMDSqc' )
+qc( 1) = CheckIndex( tower%monthindex, tower%monthstring)
+qc( 2) = CheckIndex( tower%dayindex , tower%daystring)
+qc( 3) = CheckIndex( tower%hourindex , tower%hourstring)
+qc( 4) = CheckIndex( tower%doyindex , tower%doystring)
+qc( 5) = CheckIndex( tower%hindex , tower%hstring)
+qc( 6) = CheckIndex( tower%hQCindex , tower%hQCstring)
+qc( 7) = CheckIndex( tower%leindex , tower%lestring)
+qc( 8) = CheckIndex( tower%leQCindex , tower%leQCstring)
+qc( 9) = CheckIndex( tower%neeindex , tower%neestring)
+qc(10) = CheckIndex( tower%neeQCindex, tower%neeQCstring)
if (any(qc /= 0) ) then
write(string1,*)'Did not find all the required column indices.'
@@ -521,16 +559,17 @@
! Summarize if desired
if (verbose) then
- write(*,*)'index is ', tower%monthindex ,' at one point it was 1'
- write(*,*)'index is ', tower%dayindex ,' at one point it was 2'
- write(*,*)'index is ', tower%hourindex ,' at one point it was 3'
- write(*,*)'index is ', tower%doyindex ,' at one point it was 4'
- write(*,*)'index is ', tower%hindex ,' at one point it was 15'
- write(*,*)'index is ', tower%hQCindex ,' at one point it was 16'
- write(*,*)'index is ', tower%leindex ,' at one point it was 17'
- write(*,*)'index is ', tower%leQCindex ,' at one point it was 18'
- write(*,*)'index is ', tower%neeindex ,' at one point it was 26'
- write(*,*)'index is ', tower%neeQCindex ,' at one point it was 27'
+110 format('index for ',A20,' is ',i3)
+ write(*,110)tower%monthstring, tower%monthindex
+ write(*,110)tower%daystring , tower%dayindex
+ write(*,110)tower%hourstring , tower%hourindex
+ write(*,110)tower%doystring , tower%doyindex
+ write(*,110)tower%hstring , tower%hindex
+ write(*,110)tower%hQCstring , tower%hQCindex
+ write(*,110)tower%lestring , tower%leindex
+ write(*,110)tower%leQCstring , tower%leQCindex
+ write(*,110)tower%neestring , tower%neeindex
+ write(*,110)tower%neeQCstring, tower%neeQCindex
endif
deallocate(columns)
@@ -572,7 +611,7 @@
integer :: i
Match = 0
-WordLoop : do i = 1,len(sentence)
+WordLoop : do i = 1,size(sentence)
if (trim(sentence(i)) == trim(word)) then
Match = i
return
@@ -605,16 +644,19 @@
-subroutine stringparse(str1,linenum)
+subroutine stringparse(str1, nwords, linenum)
! just declare everything as reals and chunk it
character(len=*), intent(in) :: str1
+integer , intent(in) :: nwords
integer , intent(in) :: linenum
-real(r8), dimension(34) :: values
+real(r8), allocatable, dimension(:) :: values
integer :: iday, ihour, imin, isec, seconds
type(time_type) :: time0, time1, time2
+allocate(values(nwords))
+
values = MISSING_R8
read(str1,*,iostat=rcio) values
@@ -645,6 +687,8 @@
tower%h = values(tower%hindex )
tower%hQC = nint(values(tower%hQCindex ))
+deallocate(values)
+
! decode the time pieces ... two times ...
! The LAST line of these files is knackered ... and we have to check that
! if the doy is greater than the ymd ...
@@ -665,7 +709,8 @@
call get_time(time2, isec, iday)
if ( iday > 0 ) then
- ! we need to change the day ...
+ ! FIXME we need to change the day ...
+ ! This blows up if you try to use a non-leap year with leapyear ...
tower%time_obs = time1
@@ -680,6 +725,12 @@
call print_time(time0, 'stringparse: using ymd time is')
call print_time(time1, 'stringparse: using doy time is')
call print_time(time2, 'stringparse: difference is')
+
+ call print_date(time0, 'stringparse: using ymd date is',logfileunit)
+ call print_date(time1, 'stringparse: using doy date is',logfileunit)
+ call print_time(time0, 'stringparse: using ymd time is',logfileunit)
+ call print_time(time1, 'stringparse: using doy time is',logfileunit)
+ call print_time(time2, 'stringparse: difference is',logfileunit)
endif
else
@@ -730,7 +781,7 @@
! - Rg_fqc : global radiation quality flags:
! 0 = original, 1 = A (most reliable), 2 = B (medium), 3 = C (least reliable).
! (Refer to Reichstein et al. 2005 Global Change Biology )
-! - Ta_f : air temperature filled [°C]
+! - Ta_f : air temperature filled [C]
! - Ta_fqc : air temperature quality flags:
! 0 = original, 1 = A (most reliable), 2 = B (medium), 3 = C (least reliable).
! (Refer to Reichstein et al. 2005 Global Change Biology )
@@ -738,7 +789,7 @@
! - VPD_fqc : vapour pressure deficit quality flags:
! 0 = original, 1 = A (most reliable), 2 = B (medium), 3 = C (least reliable).
! (Refer to Reichstein et al. 2005 Global Change Biology )
-! - Ts_f : soil temperature filled [°C]
+! - Ts_f : soil temperature filled [C]
! - Ts_fqc : soil temperature quality flags:
! 0 = original, 1 = A (most reliable), 2 = B (medium), 3 = C (least reliable).
! (Refer to Reichstein et al. 2005 Global Change Biology )
Modified: DART/trunk/observations/Ameriflux/level4_to_obs.html
===================================================================
--- DART/trunk/observations/Ameriflux/level4_to_obs.html 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/level4_to_obs.html 2014-08-26 18:29:45 UTC (rev 7143)
@@ -39,9 +39,9 @@
<H4>AmeriFlux Level 4 data to DART Observation Sequence Converter</H4>
<P>This routine is designed to convert the flux tower Level 4 data
-from the <a href="http://public.ornl.gov/ameriflux/">AmeriFlux</a>
+from the <a href="http://ameriflux.lbl.gov">AmeriFlux</a>
network of observations from micrometeorological tower sites.
-AmeriFlux is part of <a href="http://daac.ornl.gov/FLUXNET/fluxnet.shtml">FLUXNET</a>
+AmeriFlux is part of <a href="http://fluxnet.ornl.gov">FLUXNET</a>
and the converter is hoped to be a suitable starting point for the conversion
of observations from FLUXNET. As of May 2012, I have not yet tried to work with
any other observations from FLUXNET.
@@ -50,8 +50,7 @@
The AmeriFlux Level 4 products are recorded using the local time.
DART observation sequence files use GMT. For more information about
AmeriFlux data products, go to
-<a href="http://public.ornl.gov/ameriflux/dataproducts.shtml">
-http://public.ornl.gov/ameriflux/dataproducts.shtml</a>.
+<a href="http://ameriflux.lbl.gov">http://ameriflux.lbl.gov</a>.
</P>
<P>
@@ -184,9 +183,20 @@
<P>
The data was acquired from
-<a href="http://public.ornl.gov/ameriflux/dataproducts.shtm">
-http://public.ornl.gov/ameriflux/dataproducts.shtm</a><br />
+<a href="http://cdiac.ornl.gov/ftp/ameriflux/data/Level4/Sites_ByName">
+http://cdiac.ornl.gov/ftp/ameriflux/data/Level4/Sites_ByName</a><br />
+and have names like
+<em class=file>
+USBar2004_L4_h.txt,
+USHa12004_L4_h.txt,
+USNR12004_L4_h.txt,
+USSP32004_L4_h.txt,
+USSRM2004_L4_h.txt,
+USWCr2004_L4_h.txt,
+USWrc2004_L4_h.txt, ...
+</em>
<br />
+<br />
The Level 4 products in question are ASCII files of comma-separated values taken
every 30 minutes for an entire year. The first line is a comma-separated list of
column descriptors, all subsequent lines are comma-separated numerical values.
@@ -256,31 +266,40 @@
0 = original, 1 = category A (most reliable), 2 = category B (medium), 3 = category C (least reliable). (Refer to Reichstein et al. 2005 Global Change Biology for more information)</blockquote>
<br />
<P>
-I am repeating the AmeriFlux <a href="http://public.ornl.gov/ameriflux/data-fair-use.shtml">
+I am repeating the AmeriFlux <a href="http://ameriflux.lbl.gov/Data/Pages/DataUsagePolicy.aspx">
Data Fair-Use Policy</a> because I believe it is important to be a good scientific citizen:
</P>
+<blockquote>
+"The AmeriFlux data provided on this site are freely available and
+were furnished by individual AmeriFlux scientists who encourage their use.
<br />
+<br />
+Please kindly inform in writing (or e-mail) the appropriate AmeriFlux
+scientist(s) of how you intend to use the data and of any publication plans.
+It is also important to contact the AmeriFlux investigator to assure you are
+downloading the latest revision of the data and to prevent potential misuse
+or misinterpretation of the data.
+<br />
+<br />
+Please acknowledge the data source as a citation or in the acknowledgments
+if no citation is available. If the AmeriFlux Principal Investigators (PIs)
+feel that they should be acknowledged or offered participation as authors,
+they will let you know and we assume that an agreement on such matters
+will be reached before publishing and/or use of the data for publication.
+<br />
+<br />
+If your work directly competes with the PI's analysis they may ask that they
+have the opportunity to submit a manuscript before you submit one that uses
+unpublished data. In addition, when publishing please acknowledge the agency
+that supported the research.
+<br />
+<br />
+Lastly, we kindly request that those publishing papers using AmeriFlux data
+provide reprints to the PIs providing the data and to the AmeriFlux archive
+via ameriflux.lbl.gov."
+</blockquote>
-<blockquote>
-The AmeriFlux data ... are freely available and were furnished by
-individual AmeriFlux scientists who encourage their use. Please kindly inform in
-writing (or e-mail) the appropriate AmeriFlux scientist(s) of how you intend to use
-the data and of any publication plans. It is also important to contact the AmeriFlux
-investigator to assure you are downloading the latest revision of the data and to
-prevent potential misuse or misinterpretation of the data. Please acknowledge the
-data source as a citation or in the acknowledgments if no citation is available.
-If the AmeriFlux Principal Investigators (PIs) feel that they should be
-acknowledged or offered participation as authors, they will let you know and we
-assume that an agreement on such matters will be reached before publishing and/or
-use of the data for publication. If your work directly competes with the PI's
-analysis they may ask that they have the opportunity to submit a manuscript before
-you submit one that uses unpublished data. In addition, when publishing please
-acknowledge the agency that supported the research. Lastly, we kindly request that
-those publishing papers using AmeriFlux data provide reprints to the PIs providing
-the data and to the data archive at the Carbon Dioxide Information Analysis Center
-(CDIAC).</blockquote>
-
<!--==================================================================-->
<A NAME="Programs"></A>
Modified: DART/trunk/observations/Ameriflux/work/input.nml
===================================================================
--- DART/trunk/observations/Ameriflux/work/input.nml 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/work/input.nml 2014-08-26 18:29:45 UTC (rev 7143)
@@ -1,8 +1,9 @@
&preprocess_nml
- input_obs_kind_mod_file = '../../../obs_kind/DEFAULT_obs_kind_mod.F90',
- output_obs_kind_mod_file = '../../../obs_kind/obs_kind_mod.f90',
- input_obs_def_mod_file = '../../../obs_def/DEFAULT_obs_def_mod.F90',
- output_obs_def_mod_file = '../../../obs_def/obs_def_mod.f90',
+ overwrite_output = .true.
+ input_obs_kind_mod_file = '../../../obs_kind/DEFAULT_obs_kind_mod.F90'
+ output_obs_kind_mod_file = '../../../obs_kind/obs_kind_mod.f90'
+ input_obs_def_mod_file = '../../../obs_def/DEFAULT_obs_def_mod.F90'
+ output_obs_def_mod_file = '../../../obs_def/obs_def_mod.f90'
input_files = '../../../obs_def/obs_def_tower_mod.f90'
/
@@ -13,7 +14,7 @@
/
&utilities_nml
- module_details = .FALSE.,
+ module_details = .FALSE.
termlevel = 2
/
@@ -21,24 +22,54 @@
write_binary_obs_sequence = .FALSE.
/
-
&level4_to_obs_nml
- text_input_file = '../data/USHa12003_L4_h.txt',
- obs_out_file = 'obs_seq.out',
- year = 2003,
- timezoneoffset = -6,
- latitude = 42.5378,
- longitude = -72.1715,
- elevation = 353,
- flux_height = 29,
- maxgoodqc = 3,
+ text_input_file = '../data/USHa12003_L4_h.txt'
+ obs_out_file = 'obs_seq.out'
+ year = 2003
+ timezoneoffset = -6
+ latitude = 42.5378
+ longitude = -72.1715
+ elevation = 353
+ flux_height = 29
+ maxgoodqc = 3
verbose = .TRUE.
/
+# This is appropriate for a days worth of flux tower observations
+# the obs in the file end 1 second before the time in the name.
+# If you are using these obs with CLM, ending 1 second before is appropriate.
+# 2003-07-23-00000 is DART 147030 00000
&obs_sequence_tool_nml
- filename_seq_list = 'station_list.txt',
- filename_out = 'obs_seq.out',
- gregorian_cal = .TRUE.
+ filename_seq = 'obs_seq.out'
+ filename_seq_list = ''
+ filename_out = 'obs_seq.2003-07-23-00000'
+ print_only = .false.
+ gregorian_cal = .true.
+ first_obs_days = 147030
+ first_obs_seconds = 0
+ last_obs_days = 147030
+ last_obs_seconds = 86399
/
+&schedule_nml
+ calendar = 'Gregorian'
+ first_bin_start = 1601, 1, 1, 0, 0, 0
+ first_bin_end = 2999, 1, 1, 0, 0, 0
+ last_bin_end = 2999, 1, 1, 0, 0, 0
+ bin_interval_days = 1000000
+ bin_interval_seconds = 0
+ max_num_bins = 1000
+ print_table = .true.
+ /
+&obs_seq_to_netcdf_nml
+ obs_sequence_name = 'obs_seq.out'
+ obs_sequence_list = ''
+ append_to_netcdf = .false.
+ lonlim1 = 0.0
+ lonlim2 = 360.0
+ latlim1 = -90.0
+ latlim2 = 90.0
+ verbose = .false.
+ /
+
Added: DART/trunk/observations/Ameriflux/work/mkmf_obs_seq_to_netcdf
===================================================================
--- DART/trunk/observations/Ameriflux/work/mkmf_obs_seq_to_netcdf (rev 0)
+++ DART/trunk/observations/Ameriflux/work/mkmf_obs_seq_to_netcdf 2014-08-26 18:29:45 UTC (rev 7143)
@@ -0,0 +1,18 @@
+#!/bin/csh
+#
+# DART software - Copyright 2004 - 2013 UCAR. This open source software is
+# provided by UCAR, "as is", without charge, subject to all terms of use at
+# http://www.image.ucar.edu/DAReS/DART/DART_download
+#
+# DART $Id$
+
+../../../mkmf/mkmf -p obs_seq_to_netcdf -t ../../../mkmf/mkmf.template \
+ -a "../../.." path_names_obs_seq_to_netcdf
+
+exit $status
+
+# <next few lines under version control, do not edit>
+# $URL$
+# $Revision$
+# $Date$
+
Property changes on: DART/trunk/observations/Ameriflux/work/mkmf_obs_seq_to_netcdf
___________________________________________________________________
Added: svn:executable
+ *
Added: svn:mime-type
+ text/plain
Added: svn:keywords
+ Date Rev Author HeadURL Id
Added: svn:eol-style
+ native
Modified: DART/trunk/observations/Ameriflux/work/path_names_level4_to_obs
===================================================================
--- DART/trunk/observations/Ameriflux/work/path_names_level4_to_obs 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/work/path_names_level4_to_obs 2014-08-26 18:29:45 UTC (rev 7143)
@@ -1,12 +1,12 @@
-observations/Ameriflux/level4_to_obs.f90
+assim_model/assim_model_mod.f90
+common/types_mod.f90
location/threed_sphere/location_mod.f90
+models/template/model_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+obs_def/obs_def_mod.f90
+obs_kind/obs_kind_mod.f90
obs_sequence/obs_sequence_mod.f90
-obs_kind/obs_kind_mod.f90
-obs_def/obs_def_mod.f90
-assim_model/assim_model_mod.f90
-models/clm/model_mod.f90
-common/types_mod.f90
+observations/Ameriflux/level4_to_obs.f90
random_seq/random_seq_mod.f90
+time_manager/time_manager_mod.f90
utilities/utilities_mod.f90
-time_manager/time_manager_mod.f90
-mpi_utilities/null_mpi_utilities_mod.f90
Added: DART/trunk/observations/Ameriflux/work/path_names_obs_seq_to_netcdf
===================================================================
--- DART/trunk/observations/Ameriflux/work/path_names_obs_seq_to_netcdf (rev 0)
+++ DART/trunk/observations/Ameriflux/work/path_names_obs_seq_to_netcdf 2014-08-26 18:29:45 UTC (rev 7143)
@@ -0,0 +1,13 @@
+assim_model/assim_model_mod.f90
+common/types_mod.f90
+location/threed_sphere/location_mod.f90
+models/template/model_mod.f90
+mpi_utilities/null_mpi_utilities_mod.f90
+obs_def/obs_def_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_sequence/obs_seq_to_netcdf.f90
+obs_sequence/obs_sequence_mod.f90
+random_seq/random_seq_mod.f90
+time_manager/schedule_mod.f90
+time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
Modified: DART/trunk/observations/Ameriflux/work/path_names_obs_sequence_tool
===================================================================
--- DART/trunk/observations/Ameriflux/work/path_names_obs_sequence_tool 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/work/path_names_obs_sequence_tool 2014-08-26 18:29:45 UTC (rev 7143)
@@ -1,13 +1,13 @@
-obs_sequence/obs_sequence_tool.f90
-obs_sequence/obs_sequence_mod.f90
-obs_kind/obs_kind_mod.f90
-obs_def/obs_def_mod.f90
-cov_cutoff/cov_cutoff_mod.f90
assim_model/assim_model_mod.f90
-models/clm/model_mod.f90
common/types_mod.f90
+cov_cutoff/cov_cutoff_mod.f90
location/threed_sphere/location_mod.f90
+models/template/model_mod.f90
mpi_utilities/null_mpi_utilities_mod.f90
+obs_def/obs_def_mod.f90
+obs_kind/obs_kind_mod.f90
+obs_sequence/obs_sequence_mod.f90
+obs_sequence/obs_sequence_tool.f90
random_seq/random_seq_mod.f90
time_manager/time_manager_mod.f90
utilities/utilities_mod.f90
Modified: DART/trunk/observations/Ameriflux/work/path_names_preprocess
===================================================================
--- DART/trunk/observations/Ameriflux/work/path_names_preprocess 2014-08-26 16:45:59 UTC (rev 7142)
+++ DART/trunk/observations/Ameriflux/work/path_names_preprocess 2014-08-26 18:29:45 UTC (rev 7143)
@@ -1,5 +1,5 @@
-preprocess/preprocess.f90
common/types_mod.f90
-utilities/utilities_mod.f90
mpi_utilities/null_mpi_utilities_mod.f90
+preprocess/preprocess.f90
time_manager/time_manager_mod.f90
+utilities/utilities_mod.f90
More information about the Dart-dev
mailing list