; Program to examine the HAMSR thinned data on 5x5 km grid
; and determine variance of errors using 3 cornered hat method (Anthes and Rieckh 2018)
; Reads output for specific humidity from netcdf file saved for ERA5
; Reads dropsondes, ERA5, and HAMSR
; Andrew Kren
; January 25, 2019
;
; Output: saved_era5_drops_hamsr_q.nc with matched dropsonde / HAMSR / ERA5
; specific-humidity profiles and their pairwise differences, for use by a
; separate three-cornered-hat program.

begin

path_hamsr = "/scratch4/BMC/shout/Andrew.Kren/HAMSR/netcdf/"                 ; location of HAMSR data
path_avaps = "/scratch4/BMC/shout/Andrew.Kren/avaps_data/hrr_2016/netcdf/"   ; location of dropsonde observations
era_path   = "/scratch4/BMC/shout/Andrew.Kren/HAMSR/"

dist_thresh = 100   ; how close spatially to compare hamsr to drops, in km
time_thresh = 60    ; how close in time to compare hamsr to drops, in minutes

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; end of user's settings
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; --- ERA5 specific humidity variable
fils = (era_path+"saved_era5_q.nc")
w = addfile(fils,"r")
q_era = w->q_era
q_era!0 = "time"
q_era!1 = "lev"
q_era!2 = "lat"
q_era!3 = "lon"
delete(w)

; --- ERA5 metadata (coordinate arrays come from the saved RH file)
fils = (era_path+"saved_era5_rh.nc")
lev_era = (/100,150,200,250,300,350,400,450,500,550,600,650,700,725,750,775,800,825,850,875,900,925,950,975,1000/)
w = addfile(fils,"r")
lat_era  = w->lat_era
lon_era  = w->lon_era
time_era = w->time_era
q_era!0 = "time"
q_era!1 = "lev"
q_era!2 = "lat"
q_era!3 = "lon"
q_era&time = time_era
q_era&lev  = lev_era
q_era&lat  = lat_era
q_era&lon  = lon_era
delete(w)
delete(fils)

; --- read HAMSR
fils = systemfunc("ls " + path_hamsr + "*2016*.nc")   ; file paths
w = addfiles(fils,"r")                                ; read data
lat  = w[:]->lat
lat := lat * 0.001      ; scale stored integer values to degrees
lon  = w[:]->lon
lon := lon * 0.001
time = w[:]->time
time@units = "seconds since 2000-01-01 00:00:00.0"
t   = short2flt(w[:]->ham_airT)
q   = short2flt(w[:]->ham_airQ)
rh  = short2flt(w[:]->ham_airRH)
lev = short2flt(w[:]->ham_pres_levels)
lev := lev(0:24)        ; keep the 25 levels matching the saved ERA5 grid
t@units = "K"
delete(w)

; --- reorder arrays since time in original files is non-monotonic
; return permutation vector for sorting time
; NOTE(review): dim_pqsort sorts t&time in place AND returns the permutation;
; re-subscripting t with ip below then re-applies the permutation to the
; already-sorted coordinate. Confirm t&time is still monotonic afterwards
; (the separate "time" variable is sorted correctly either way).
ip = dim_pqsort(t&time,1)
; Use ip vector to reorder arrays. This will also have the effect
; of reordering the time coordinate array.
t    := t(ip,:)
time := time(ip)
lat  := lat(ip)
lon  := lon(ip)
q    := q(ip,:)
rh   := rh(ip,:)

; calculate specific humidity from T, p, RH
shum = mixhum_ptrh(conform(t,lev,1),t,rh,-2)   ; g/kg

; calculate dates
time_hamsr = cd_calendar(time,0)
year   = tostring(tointeger(time_hamsr(:,0)))
month  = tostring(tointeger(time_hamsr(:,1)))
day    = tostring(tointeger(time_hamsr(:,2)))
hour   = tostring(tointeger(time_hamsr(:,3)))
minute = tostring(tointeger(time_hamsr(:,4)))
sec    = tostring(tointeger(time_hamsr(:,5)))

; fix time to add missing zeros to certain dates and times
month  = where(tointeger(month).lt.10,"0"+month,month)
day    = where(tointeger(day).lt.10,"0"+day,day)
hour   = where(tointeger(hour).lt.10,"0"+hour,hour)
minute = where(tointeger(minute).lt.10,"0"+minute,minute)
sec    = where(tointeger(sec).lt.10,"0"+sec,sec)

; read in the avaps dropsonde data
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
delete(fils)
fils2 = systemfunc("csh -c 'cd " + path_avaps + " ; ls *.nc'")   ; for extracting dates
fils  = systemfunc("ls " + path_avaps + "*.nc")                  ; file paths

; match dates between drop release and hamsr profiles
; find closest values by looping over drop files
length = dimsizes(fils)
t_drop          = new((/length,25/),"float")
q_drop          = t_drop
rh_drop         = t_drop
t_era_save      = t_drop
rh_era_save     = t_drop
q_era_save      = t_drop
q_ham           = t_drop
rh_ham          = t_drop
t_ham           = t_drop
q_drop_ham_diff = t_drop
q_era_drop_diff = t_drop
q_ham_era_diff  = t_drop

z = 0
z_counter = 1

do i=0,dimsizes(fils)-1   ; loop over dropsondes
  ;print("Working on dropsonde number: " + i)
  w = addfile(fils(i),"r")   ; read data
  lat_d = w->lat
  lon_d = w->lon
  t_d   = (w->temp) + 273.15   ; convert to K in the process
  rh_d  = w->rh
  lev_d = w->pres
  t_d@units = "K"
  t_d@_FillValue  = -999
  rh_d@_FillValue = -999
  indices_lev = ind(lev_d.ne.-999)   ; non-missing values only
  lev_d := lev_d(indices_lev)
  t_d   := t_d(indices_lev)
  rh_d  := rh_d(indices_lev)
  lat_d := lat_d(indices_lev)
  lon_d := lon_d(indices_lev)

  ; get time of release of drop from the file names
  year_d   = tointeger(str_get_cols(fils2(i),1,4))    ; year, month, day, hour, minute, sec in UTC, same for hamsr
  month_d  = tointeger(str_get_cols(fils2(i),5,6))
  day_d    = tointeger(str_get_cols(fils2(i),7,8))
  hour_d   = tointeger(str_get_cols(fils2(i),10,11))
  minute_d = tointeger(str_get_cols(fils2(i),12,13))
  sec_d    = tointeger(str_get_cols(fils2(i),14,15))

  ; compute drop specific humidity
  shum_d = mixhum_ptrh(conform(t_d,lev_d,0),t_d,rh_d,-2)   ; g/kg
  shum_d@_FillValue = -999

  units  = "seconds since 2000-01-01 00:00:00.0"
  time_d = cd_inv_calendar(year_d,month_d,day_d,hour_d,minute_d,sec_d,units,0)

  ; compare hamsr to dropsonde release, check spatially and temporally
  ; find hamsr profiles that are within xx minutes of dropsonde release and within xx km of release point
  ; then match ERA5 with hamsr time and location
  time_ind = ind(abs(time-time_d).le.time_thresh*60)   ; +/- xx minutes of drop release (indices)

  ; compute median of values after spatial comparison
  if (dimsizes(time_ind).ge.2) then   ; require 2 or more for averaging
    lat_sub = lat(time_ind)   ; for computing distances without a loop
    lat_sub = lat_d(0)        ; copy metadata/shape, then fill with release point
    lon_sub = lon(time_ind)
    lon_sub = lon_d(0)
    dist = gc_latlon(lat(time_ind),lon(time_ind),lat_sub,lon_sub,2,4)   ; km distances
    dist_ind = ind(dist.le.dist_thresh)   ; +/- dist_thresh
    if (dimsizes(dist_ind).ge.2) then     ; require 2 or more for averaging
      z = z + 1   ; counter for drops comparisons
      shum_1 := shum(time_ind(dist_ind),:)
      temp_1 := t(time_ind(dist_ind),:)
      rh_1   := rh(time_ind(dist_ind),:)
      time_1 := time(time_ind(dist_ind))   ; hamsr times near drops
      lat_1  := lat(time_ind(dist_ind))
      lon_1  := lon(time_ind(dist_ind))
      shum_1 = shum_1(:,::-1)   ; reverse pressure levels to match with drops and ERA5
      temp_1 = temp_1(:,::-1)
      rh_1   = rh_1(:,::-1)

      ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
      ; check if hamsr is monotonically increasing before doing comparisons
      ismon = isMonotonic(time_1)
      if (ismon.eq.0) then
        time_update := get_unique_values(time_1)   ; avoid duplicate times in hamsr
        ; find indices for fixing arrays of hamsr data
        indexes := get1Dindex(time_1,time_update)
        time_1  := time_update   ; set new time to the updated time
        shum_1  := shum_1(indexes,:)
        temp_1  := temp_1(indexes,:)
        rh_1    := rh_1(indexes,:)
        lat_1   := lat_1(indexes)
        lon_1   := lon_1(indexes)
      end if

      if (dimsizes(time_1).ge.2) then
        ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
        ; change longitudes to be degrees east from 0 to 360 in hamsr to match with ERA5
        lon_1 = lon_1 + 360.0   ; degrees east (GH flies only in Atlantic so should work)

        ; interpolate the drops to hamsr pressure levels
        linlog = 2   ; ln(p) interpolation
        t_int    = int2p(lev_d,t_d,lev(::-1),linlog)
        shum_int = int2p(lev_d,shum_d,lev(::-1),linlog)
        rh_int   = int2p(lev_d,rh_d,lev(::-1),linlog)

        ; linearly interpolate HAMSR profiles to dropsonde release time
        shum_1 := linint1_n(time_1,shum_1,False,time_d,0,0)
        ;printVarSummary(q_era)   ; debug only

        ; bilinearly interpolate ERA5 to dropsonde release location
        q_era_int := linint2_points(lon_era,lat_era,q_era,False,lon_d(0),lat_d(0),0)
        ; linearly interpolate ERA5 to dropsonde release time
        q_era_int := linint1_n(time_era,q_era_int,False,time_d, 0, 0)   ; time x lev x locations
        ;printVarSummary(q_era)   ; debug only
        ; NOTE: a leftover debugging "exit" was removed here -- it aborted the
        ; script on the first matched profile, so the output netcdf was never written.

        ; save profiles for all three datasets to be used for three corner method later...
        ; save drops and hamsr, as long as no missing data in hamsr
        if (.not.all(ismissing(shum_1(0,:))).and..not.all(ismissing(shum_int))) then
          print("Saving Profile " + z_counter)
          q_drop(i,:)     = shum_int
          q_ham(i,:)      = shum_1(0,:)
          q_era_save(i,:) = rm_single_dims(q_era_int)
          q_drop_ham_diff(i,:) = q_drop(i,:) - q_ham(i,:)
          q_era_drop_diff(i,:) = q_era_save(i,:) - q_drop(i,:)
          q_ham_era_diff(i,:)  = q_ham(i,:) - q_era_save(i,:)
          z_counter = z_counter + 1
        else
          ;print("Data missing for this dropsonde, not saving any data for this comparison...")
        end if

        delete([/temp_1,shum_1,t_int,shum_int,rh_1,rh_int,q_era_int,time_1,lat_1,lon_1/])
      end if
    end if
    delete([/dist,dist_ind,lon_sub,lat_sub/])
  end if
  delete([/lat_d,lon_d,t_d,rh_d,lev_d,time_d,time_ind,shum_d,w,indices_lev/])
end do

; save variables to netcdf for doing three corner hat method later in separate program
print("Making netcdf file...")
filo = "saved_era5_drops_hamsr_q.nc"
system("rm -rf "+filo)   ; remove any pre-existing file
a = addfile(filo,"c")
a@title = "Saved ERA5, Dropsondes, and HAMSR 2016 profiles"
a@source = filo
a@creation_date = systemfunc("date")
a->dropsonde_q = q_drop
a->hamsr_q = q_ham
a->era_q = q_era_save
a->q_drop_ham_diff = q_drop_ham_diff
a->q_era_drop_diff = q_era_drop_diff
a->q_ham_era_diff = q_ham_era_diff
print("Program ended Successfully")

end