////////////////////////////////////////////////////////////////////////////////
//
// Default point_stat configuration file
//
////////////////////////////////////////////////////////////////////////////////

//
// Specify a name to designate the model being verified.  This name will be
// written to the second column of the ASCII output generated.
//
model = "WFO CRP WRF-ARW";

//
// Beginning and ending time offset values in seconds for observations
// to be used.  These time offsets are defined in reference to the
// forecast valid time, v.  Observations with a valid time falling in the
// window [v+beg_ds, v+end_ds] will be used.
// These selections are overridden by the command line arguments
// -valid_beg and -valid_end.
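//
// e.g. beg_ds = -5400 and end_ds = 5400 use observations valid within
//      90 minutes of the forecast valid time.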
//
beg_ds = -5400;
end_ds =  5400;

//
// Specify a comma-separated list of fields to be verified.  The forecast and
// observation fields may be specified separately.  If the obs_field parameter
// is left blank, it will default to the contents of fcst_field.
//
// Each field is specified as a grib code or corresponding grib code
// abbreviation followed by an accumulation or vertical level indicator.
//
// Each verification field is specified as one of the following:
//    GC/ANNN for accumulation interval NNN
//    GC/ZNNN for vertical level NNN
//    GC/PNNN for pressure level NNN in hPa
//    GC/PNNN-NNN for a range of pressure levels in hPa
//    GC/LNNN for a generic level type
//    GC/RNNN for a specific GRIB record number
//    Where GC is the numeric GRIB code or its abbreviation for the
//    field to be verified.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/table2.html
//
//    NOTE: To verify winds as vectors rather than scalars,
//          specify UGRD (or 33) followed by VGRD (or 34) with the
//          same level values.
//
//    NOTE: To process a probability field, add "/PROB", such as "POP/Z0/PROB".
//
// e.g. fcst_field[] = [ "SPFH/P500", "TMP/P500" ];
//
// fcst_field[] = [ "SPFH/P500", "TMP/P500", "HGT/P500",
//                 "UGRD/P500", "VGRD/P500" ];

//fcst_field[] = [ "TMP/R026" ];
fcst_field[] = [ "11/Z2" ];
obs_field[]  = [ "11/Z2" ];
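//
// Note: in NCEP parameter table 2, GRIB code 11 is temperature (TMP), so the
// settings above are equivalent to "TMP/Z2", i.e. 2 m temperature.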

//
// Specify a comma-separated list of groups of thresholds to be applied to the
// fields listed above.  Thresholds for the forecast and observation fields
// may be specified separately.  If the obs_thresh parameter is left blank,
// it will default to the contents of fcst_thresh.
//
// At least one threshold must be provided for each field listed above.  The
// lengths of the "fcst_field" and "fcst_thresh" arrays must match, as must
// lengths of the "obs_field" and "obs_thresh" arrays.  To apply multiple
// thresholds to a field, separate the threshold values with a space.
//
// Each threshold must be preceded by a two letter indicator for the type of
// thresholding to be performed:
//    'lt' for less than     'le' for less than or equal to
//    'eq' for equal to      'ne' for not equal to
//    'gt' for greater than  'ge' for greater than or equal to
//
// NOTE: Thresholds for probabilities must be preceded by "ge".
//
// e.g. fcst_thresh[] = [ "gt80", "gt273" ];
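// e.g. fcst_thresh[] = [ "gt273 gt278 gt283" ]; applies three thresholds
//      to a single field by separating them with spaces.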
//
//fcst_thresh[] = [ "gt80", "gt273", "gt0.0", "gt5.0", "gt5.0" ];
fcst_thresh[] = [ "gt0.0" ];
//fcst_thresh[] = [];

obs_thresh[]  = [];

//
// Specify a comma-separated list of thresholds to be used when computing
// VL1L2 and VAL1L2 partial sums for winds.  The thresholds are applied to the
// wind speed values derived from each U/V pair.  Only those U/V pairs which meet
// the wind speed threshold criteria are retained.  If the obs_wind_thresh
// parameter is left blank, it will default to the contents of fcst_wind_thresh.
//
// To apply multiple wind speed thresholds, separate the threshold values with a
// space.  Use "NA" to indicate that no wind speed threshold should be applied.
//
// Each threshold must be preceded by a two letter indicator for the type of
// thresholding to be performed:
//    'lt' for less than     'le' for less than or equal to
//    'eq' for equal to      'ne' for not equal to
//    'gt' for greater than  'ge' for greater than or equal to
//    'NA' for no threshold
//
// e.g. fcst_wind_thresh[] = [ "NA", "ge1.0" ];
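// e.g. fcst_wind_thresh[] = [ "ge1.0 ge5.0" ]; applies two wind speed
//      thresholds to the same pair of U/V fields.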
//
fcst_wind_thresh[] = [ "NA" ];
obs_wind_thresh[]  = [];

//
// Specify a comma-separated list of PrepBufr message types with which
// to perform the verification.  Statistics will be computed separately
// for each message type specified.  At least one PrepBufr message type
// must be provided.
// List of valid message types:
//    ADPUPA AIRCAR AIRCFT ADPSFC ERS1DA GOESND GPSIPW
//    MSONET PROFLR QKSWND RASSDA SATEMP SATWND SFCBOG
//    SFCSHP SPSSMI SYNDAT VADWND
//    ANYAIR (= AIRCAR, AIRCFT)
//    ANYSFC (= ADPSFC, SFCSHP, ADPUPA, PROFLR)
//    ONLYSF (= ADPSFC, SFCSHP)
// http://www.emc.ncep.noaa.gov/mmb/data_processing/prepbufr.doc/table_1.htm
//
// e.g. message_type[] = [ "ADPUPA", "AIRCAR" ];
//
message_type[] = [ "ADPSFC" ];

//
// Specify a comma-separated list of grids to be used in masking the data over
// which to perform scoring.  An empty list indicates that no masking grids
// should be applied.  The standard NCEP grids are named "GNNN" where NNN
// indicates the three digit grid number.  Enter "FULL" to score over the
// entire domain.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html
//
// e.g. mask_grid[] = [ "FULL" ];
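// e.g. mask_grid[] = [ "G212" ]; restricts scoring to NCEP grid 212.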
//
mask_grid[] = [ "FULL" ];

//
// Specify a comma-separated list of masking regions to be applied.
// An empty list indicates that no additional masks should be used.
// The masking regions may be defined in one of 4 ways:
//
// (1) An ASCII file containing a lat/lon polygon.
//     Latitude in degrees north and longitude in degrees east.
//     By default, the first and last polygon points are connected.
//     e.g. "MET_BASE/data/poly/EAST.poly" which consists of n points:
//          "poly_name lat1 lon1 lat2 lon2... latn lonn"
//
// (2) The NetCDF output of the gen_poly_mask tool.
//
// (3) A NetCDF data file, followed by the name of the NetCDF variable
//     to be used, and optionally, a threshold to be applied to the field.
//     e.g. "sample.nc var_name gt0.00"
//
// (4) A GRIB data file, followed by a description of the field
//     to be used, and optionally, a threshold to be applied to the field.
//     e.g. "sample.grb APCP/A3 gt0.00"
//
// Any NetCDF or GRIB file used must have the same grid dimensions as the
// data being verified.
//
// MET_BASE may be used in the path for the files above.
//
// e.g. mask_poly[] = [ "MET_BASE/data/poly/EAST.poly",
//                      "poly_mask.ncf",
//                      "sample.nc APCP",
//                      "sample.grb HGT/Z0 gt100.0" ];
//
//mask_poly[] = [ "MET_BASE/data/poly/EAST.poly" ];
//mask_poly[] = [ "/usr3/ver/inputdata/MTR/2009/04/test/WFOCRP.nc" ];
mask_poly[] = [];

//
// Specify the name of an ASCII file containing a space-separated list of
// station IDs at which to perform verification.  Each station ID specified
// is treated as an individual masking region.
//
// An empty list file name indicates that no station ID masks should be used.
//
// MET_BASE may be used in the path for the station ID mask file name.
//
// e.g. mask_sid = "MET_BASE/data/stations/CONUS.stations";
//
mask_sid = "/usr3/ver/conf/MTR.stations";

//
// Specify a comma-separated list of values for alpha to be used when computing
// confidence intervals.  Values of alpha must be between 0 and 1.
//
// e.g. ci_alpha[] = [ 0.05, 0.10 ];
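//      An alpha value of 0.05 corresponds to a 95% confidence interval.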
//
ci_alpha[] = [ 0.05 ];

//
// Specify the method to be used for computing bootstrap confidence intervals.
// The value for this is interpreted as follows:
//    (0) Use the BCa interval method (computationally intensive)
//    (1) Use the percentile interval method
//
boot_interval = 1;

//
// Specify a proportion between 0 and 1 to define the replicate sample size
// to be used when computing percentile intervals.  The replicate sample
// size is set to boot_rep_prop * n, where n is the number of raw data points.
//
// e.g. boot_rep_prop = 0.80;
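//      With boot_rep_prop = 0.80 and n = 1000 matched pairs, each bootstrap
//      replicate would contain 800 points.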
//
boot_rep_prop = 1.0;

//
// Specify the number of times each set of matched pair data should be
// resampled when computing bootstrap confidence intervals.  A value of
// zero disables the computation of bootstrap confidence intervals.
//
// e.g. n_boot_rep = 1000;
//
n_boot_rep = 1000;

//
// Specify the name of the random number generator to be used.  See the MET
// Users Guide for a list of possible random number generators.
//
boot_rng = "mt19937";

//
// Specify the seed value to be used when computing bootstrap confidence
// intervals.  If left unspecified, the seed will change for each run and
// the computed bootstrap confidence intervals will not be reproducible.
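//
// e.g. boot_seed = "12345";  (any fixed string; this value is arbitrary)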
//
boot_seed = "";

//
// Specify a comma-separated list of interpolation method(s) to be used
// for comparing the forecast grid to the observation points.  String values
// are interpreted as follows:
//    MIN     = Minimum in the neighborhood
//    MAX     = Maximum in the neighborhood
//    MEDIAN  = Median in the neighborhood
//    UW_MEAN = Unweighted mean in the neighborhood
//    DW_MEAN = Distance-weighted mean in the neighborhood
//    LS_FIT  = Least-squares fit in the neighborhood
//
// In all cases, vertical interpolation is performed in the natural log of
// pressure between the levels above and below the observation.
//
// e.g. interp_method[] = [ "UW_MEAN", "MEDIAN" ];
//
interp_method[] = [ "DW_MEAN" ];

//
// Specify a comma-separated list of box widths to be used by the
// interpolation techniques listed above.  A value of 1 indicates that
// the nearest neighbor approach should be used.  For a value of n
// greater than 1, the n*n grid points closest to the observation define
// the neighborhood.
//
// e.g. interp_width[] = [ 1, 3, 5 ];
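//      A width of 3 defines a neighborhood of the 3*3 = 9 grid points
//      nearest each observation.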
//
interp_width[] = [ 3 ];

//
// When interpolating, compute a ratio of the number of valid data points
// to the total number of points in the neighborhood.  If that ratio is
// less than this threshold, do not include the observation.  This
// threshold must be between 0 and 1.  Setting this threshold to 1 will
// require that each observation be surrounded by n*n valid forecast
// points.
//
// e.g. interp_thresh = 1.0;
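//      With interp_width[] = [ 3 ] and interp_thresh = 1.0, all 9 points in
//      the neighborhood must contain valid forecast data for the observation
//      to be used.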
//
interp_thresh = 1.0;

//
// Specify flags to indicate the type of data to be output:
//    (1) STAT and FHO Text Files, Forecast, Hit, Observation Rates:
//           Total (TOTAL),
//           Forecast Rate (F_RATE),
//           Hit Rate (H_RATE),
//           Observation Rate (O_RATE)
//
//    (2) STAT and CTC Text Files, Contingency Table Counts:
//           Total (TOTAL),
//           Forecast Yes and Observation Yes Count (FY_OY),
//           Forecast Yes and Observation No Count (FY_ON),
//           Forecast No and Observation Yes Count (FN_OY),
//           Forecast No and Observation No Count (FN_ON)
//
//    (3) STAT and CTS Text Files, Contingency Table Scores:
//           Total (TOTAL),
//           Base Rate (BASER), BASER_CL, BASER_CU,
//           Forecast Mean (FMEAN), FMEAN_CL, FMEAN_CU,
//           Accuracy (ACC), ACC_CL, ACC_CU,
//           Frequency Bias (FBIAS),
//           Probability of Detecting Yes (PODY), PODY_CL, PODY_CU,
//           Probability of Detecting No (PODN), PODN_CL, PODN_CU,
//           Probability of False Detection (POFD), POFD_CL, POFD_CU,
//           False Alarm Ratio (FAR), FAR_CL, FAR_CU,
//           Critical Success Index (CSI), CSI_CL, CSI_CU,
//           Gilbert Skill Score (GSS),
//           Hanssen and Kuipers Discriminant (HK), HK_CL, HK_CU,
//           Heidke Skill Score (HSS),
//           Odds Ratio (ODDS), ODDS_CL, ODDS_CU
//
//    (4) STAT and CNT Text Files, Statistics of Continuous Variables:
//           Total (TOTAL),
//           Forecast Mean (FBAR), FBAR_CL, FBAR_CU,
//           Forecast Standard Deviation (FSTDEV), FSTDEV_CL, FSTDEV_CU
//           Observation Mean (OBAR), OBAR_CL, OBAR_CU,
//           Observation Standard Deviation (OSTDEV), OSTDEV_CL, OSTDEV_CU,
//           Pearson's Correlation Coefficient (PR_CORR), PR_CORR_CL, PR_CORR_CU,
//           Spearman's Rank Correlation Coefficient (SP_CORR),
//           Kendall Tau Rank Correlation Coefficient (KT_CORR),
//           Number of ranks compared (RANKS),
//           Number of tied ranks in the forecast field (FRANK_TIES),
//           Number of tied ranks in the observation field (ORANK_TIES),
//           Mean Error (ME), ME_CL, ME_CU,
//           Standard Deviation of the Error (ESTDEV), ESTDEV_CL, ESTDEV_CU,
//           Bias (BIAS = FBAR - OBAR),
//           Mean Absolute Error (MAE),
//           Mean Squared Error (MSE),
//           Bias-Corrected Mean Squared Error (BCMSE),
//           Root Mean Squared Error (RMSE),
//           Percentiles of the Error (E10, E25, E50, E75, E90)
//
//           NOTE: CL and CU values define lower and upper
//                 confidence interval limits.
//
//    (5) STAT and SL1L2 Text Files, Scalar Partial Sums:
//           Total (TOTAL),
//           Forecast Mean (FBAR),
//              = mean(f)
//           Observation Mean (OBAR),
//              = mean(o)
//           Forecast*Observation Product Mean (FOBAR),
//              = mean(f*o)
//           Forecast Squared Mean (FFBAR),
//              = mean(f^2)
//           Observation Squared Mean (OOBAR)
//              = mean(o^2)
//
//    (6) STAT and SAL1L2 Text Files, Scalar Anomaly Partial Sums:
//           Total (TOTAL),
//           Forecast Anomaly Mean (FABAR),
//              = mean(f-c)
//           Observation Anomaly Mean (OABAR),
//              = mean(o-c)
//           Product of Forecast and Observation Anomalies Mean (FOABAR),
//              = mean((f-c)*(o-c))
//           Forecast Anomaly Squared Mean (FFABAR),
//              = mean((f-c)^2)
//           Observation Anomaly Squared Mean (OOABAR)
//              = mean((o-c)^2)
//
//    (7) STAT and VL1L2 Text Files, Vector Partial Sums:
//           Total (TOTAL),
//           U-Forecast Mean (UFBAR),
//              = mean(uf)
//           V-Forecast Mean (VFBAR),
//              = mean(vf)
//           U-Observation Mean (UOBAR),
//              = mean(uo)
//           V-Observation Mean (VOBAR),
//              = mean(vo)
//           U-Product Plus V-Product (UVFOBAR),
//              = mean(uf*uo+vf*vo)
//           U-Forecast Squared Plus V-Forecast Squared (UVFFBAR),
//              = mean(uf^2+vf^2)
//           U-Observation Squared Plus V-Observation Squared (UVOOBAR)
//              = mean(uo^2+vo^2)
//
//    (8) STAT and VAL1L2 Text Files, Vector Anomaly Partial Sums:
//           Total (TOTAL),
//           U-Forecast Anomaly Mean (UFABAR),
//              = mean(uf-uc)
//           V-Forecast Anomaly Mean (VFABAR),
//              = mean(vf-vc)
//           U-Observation Anomaly Mean (UOABAR),
//              = mean(uo-uc)
//           V-Observation Anomaly Mean (VOABAR),
//              = mean(vo-vc)
//           U-Anomaly Product Plus V-Anomaly Product (UVFOABAR),
//              = mean((uf-uc)*(uo-uc)+(vf-vc)*(vo-vc))
//           U-Forecast Anomaly Squared Plus V-Forecast Anomaly Squared (UVFFABAR),
//              = mean((uf-uc)^2+(vf-vc)^2)
//           U-Observation Anomaly Squared Plus V-Observation Anomaly Squared (UVOOABAR)
//              = mean((uo-uc)^2+(vo-vc)^2)
//
//    (9) STAT and PCT Text Files, Nx2 Probability Contingency Table Counts:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Row Observation Yes Count (OY_i),
//           Row Observation No Count (ON_i),
//           NOTE: Previous 3 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//   (10) STAT and PSTD Text Files, Nx2 Probability Contingency Table Scores:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Reliability (RELIABILITY),
//           Resolution (RESOLUTION),
//           Uncertainty (UNCERTAINTY),
//           Area Under the ROC Curve (ROC_AUC),
//           Brier Score (BRIER), BRIER_NCL, BRIER_NCU,
//           Probability Threshold Value (THRESH_i)
//           NOTE: Previous column repeated for each probability threshold
//
//   (11) STAT and PJC Text Files, Joint/Continuous Statistics of
//                                 Probabilistic Variables:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Observation Yes Count Divided by Total (OY_TP_i),
//           Observation No Count Divided by Total (ON_TP_i),
//           Calibration (CALIBRATION_i),
//           Refinement (REFINEMENT_i),
//           Likelihood (LIKELIHOOD_i),
//           Base Rate (BASER_i),
//           NOTE: Previous 7 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//   (12) STAT and PRC Text Files, ROC Curve Points for
//                                 Probabilistic Variables:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Probability of Detecting Yes (PODY_i),
//           Probability of False Detection (POFD_i),
//           NOTE: Previous 3 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//   (13) STAT and MPR Text Files, Matched Pair Data:
//           Total (TOTAL),
//           Index (INDEX),
//           Latitude (LAT),
//           Longitude (LON),
//           Level (LEVEL),
//           Forecast Value (FCST),
//           Observation Value (OBS),
//           Climatological Value (CLIMO),
//           Interpolation Method (INTERP_MTHD),
//           Interpolation Points (INTERP_PNTS)
//
//   In the expressions above, f are forecast values, o are observed values,
//   and c are climatological values.
//
// Values for these flags are interpreted as follows:
//    (0) Do not generate output of this type
//    (1) Write output to a STAT file
//    (2) Write output to a STAT file and a text file
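//
// The 13 entries correspond, in order, to the FHO, CTC, CTS, CNT, SL1L2,
// SAL1L2, VL1L2, VAL1L2, PCT, PSTD, PJC, PRC, and MPR line types described
// above.
//
// e.g. output_flag[] = [ 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 1 ];
//      writes the first five line types to both STAT and text files, disables
//      the anomaly, vector, and probability line types, and writes matched
//      pair (MPR) output to the STAT file only.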
//
output_flag[] = [ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 ];

//
// Flag to indicate whether Kendall's Tau and Spearman's Rank Correlation
// Coefficients should be computed.  Computing them over large datasets is
// computationally intensive and significantly slows down execution.
//    (0) Do not compute these correlation coefficients
//    (1) Compute these correlation coefficients
//
rank_corr_flag = 1;

//
// Specify the GRIB Table 2 parameter table version number to be used
// for interpreting GRIB codes.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/table2.html
//
grib_ptv = 2;

//
// Directory where temporary files should be written.
//
tmp_dir = "/usr3/ver/output/tmp";

//
// Prefix to be used for the output file names.
//
output_prefix = "ARW";

//
// Indicate a version number for the contents of this configuration file.
// The value should generally not be modified.
//
version = "V2.0";
