///////////////////////////////////////////////////////////////////////////////
//
// grid_stat configuration file
//
////////////////////////////////////////////////////////////////////////////////

//
// Specify a name to designate the model being verified.  This name will be
// written to the second column of the ASCII output generated.
//
model = "CNRFC";

//
// Specify a comma-separated list of fields to be verified.  The forecast and
// observation fields may be specified separately.  If the obs_field parameter
// is left blank, it will default to the contents of fcst_field.
//
// Each field is specified as a grib code or corresponding grib code
// abbreviation followed by an accumulation or vertical level indicator.
//
// Each verification field is specified as one of the following:
//    GC/ANNN for accumulation interval NNN
//    GC/ZNNN for vertical level NNN
//    GC/PNNN for pressure level NNN in hPa
//    GC/PNNN-NNN for a range of pressure levels in hPa
//    GC/LNNN for a generic level type
//    GC/RNNN for a specific GRIB record number
//    Where GC is the number of or abbreviation for the grib code
//    to be verified.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/table2.html
//
//    NOTE: To verify winds as vectors rather than scalars,
//          specify UGRD (or 33) followed by VGRD (or 34) with the
//          same level values.
//
//    NOTE: To process a probability field, add "/PROB", such as "POP/Z0/PROB".
//
// e.g. fcst_field[] = [ "61/A3", "APCP/A24", "RH/L10" ];
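// e.g. to verify 10 m winds as a vector quantity, per the UGRD/VGRD note
//      above (the Z10 level value is illustrative):
//      fcst_field[] = [ "UGRD/Z10", "VGRD/Z10" ];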
//
fcst_field[] = ["APCP/A24"];
obs_field[]  = ["APCP/A24"];

//
// Specify a comma-separated list of groups of thresholds to be applied to the
// fields listed above.  Thresholds for the forecast and observation fields
// may be specified separately.  If the obs_thresh parameter is left blank,
// it will default to the contents of fcst_thresh.
//
// At least one threshold must be provided for each field listed above.  The
// lengths of the "fcst_field" and "fcst_thresh" arrays must match, as must
// lengths of the "obs_field" and "obs_thresh" arrays.  To apply multiple
// thresholds to a field, separate the threshold values with a space.
//
// Each threshold must be preceded by a two letter indicator for the type of
// thresholding to be performed:
//    'lt' for less than     'le' for less than or equal to
//    'eq' for equal to      'ne' for not equal to
//    'gt' for greater than  'ge' for greater than or equal to
//
// NOTE: Thresholds for probabilities must be preceded by "ge".
//
// e.g. fcst_thresh[] = [ "gt0.0 ge5.0", "gt0.0", "lt80.0 ge80.0" ];
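// e.g. an illustrative two-field setup showing the required length matching
//      between the "fcst_field" and "fcst_thresh" arrays:
//      fcst_field[]  = [ "APCP/A24", "APCP/A3" ];
//      fcst_thresh[] = [ "ge0.01 ge0.50", "ge0.01" ];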
//
fcst_thresh[] = [ "ge0.01" ];
obs_thresh[]  = [ "ge0.01" ];

//
// Specify a comma-separated list of thresholds to be used when computing
// VL1L2 partial sums for winds.  The thresholds are applied to the wind speed
// values derived from each U/V pair.  Only those U/V pairs which meet the wind
// speed threshold criteria are retained.  If the obs_wind_thresh parameter is
// left blank, it will default to the contents of fcst_wind_thresh.
//
// To apply multiple wind speed thresholds, separate the threshold values with a
// space.  Use "NA" to indicate that no wind speed threshold should be applied.
//
// Each threshold must be preceded by a two letter indicator for the type of
// thresholding to be performed:
//    'lt' for less than     'le' for less than or equal to
//    'eq' for equal to      'ne' for not equal to
//    'gt' for greater than  'ge' for greater than or equal to
//    'NA' for no threshold
//
// e.g. fcst_wind_thresh[] = [ "NA", "ge1.0" ];
//
fcst_wind_thresh[] = [ "NA" ];
obs_wind_thresh[]  = [];

//
// Specify a comma-separated list of grids to be used in masking the data over
// which to perform scoring.  An empty list indicates that no masking grids
// should be applied.  The standard NCEP grids are named "GNNN" where NNN
// indicates the three digit grid number.  Enter "FULL" to score over the
// entire domain.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html
//
// e.g. mask_grid[] = [ "FULL" ];
//
mask_grid[] = [ "FULL" ];

//
// Specify a comma-separated list of masking regions to be applied.
// An empty list indicates that no additional masks should be used.
// The masking regions may be defined in one of 4 ways:
//
// (1) An ASCII file containing a lat/lon polygon.
//     Latitude in degrees north and longitude in degrees east.
//     By default, the first and last polygon points are connected.
//     e.g. "MET_BASE/data/poly/EAST.poly" which consists of n points:
//          "poly_name lat1 lon1 lat2 lon2... latn lonn"
//
// (2) The NetCDF output of the gen_poly_mask tool.
//
// (3) A NetCDF data file, followed by the name of the NetCDF variable
//     to be used, and optionally, a threshold to be applied to the field.
//     e.g. "sample.nc var_name gt0.00"
//
// (4) A GRIB data file, followed by a description of the field
//     to be used, and optionally, a threshold to be applied to the field.
//     e.g. "sample.grb APCP/A3 gt0.00"
//
// Any NetCDF or GRIB file used must have the same grid dimensions as the
// data being verified.
//
// MET_BASE may be used in the path for the files above.
//
// e.g. mask_poly[] = [ "MET_BASE/data/poly/EAST.poly",
//                      "poly_mask.ncf",
//                      "sample.nc APCP",
//                      "sample.grb HGT/Z0 gt100.0" ];
//
mask_poly[] = [ "/home/esukovich/DATA/QPE/WEST.poly" ];

//
// Specify a comma-separated list of values for alpha to be used when computing
// confidence intervals.  Values of alpha must be between 0 and 1.
//
// e.g. ci_alpha[] = [ 0.05, 0.10 ];
//
ci_alpha[] = [ 0.05 ];

//
// Specify the method to be used for computing bootstrap confidence intervals.
// The value for this is interpreted as follows:
//    (0) Use the BCa interval method (computationally intensive)
//    (1) Use the percentile interval method
//
boot_interval = 1;

//
// Specify a proportion between 0 and 1 to define the replicate sample size
// to be used when computing percentile intervals.  The replicate sample
// size is set to boot_rep_prop * n, where n is the number of raw data points.
//
// e.g. boot_rep_prop = 0.80;
//
boot_rep_prop = 1.0;

//
// Specify the number of times each set of matched pair data should be
// resampled when computing bootstrap confidence intervals.  A value of
// zero disables the computation of bootstrap confidence intervals.
//
// e.g. n_boot_rep = 1000;
//
n_boot_rep = 0;

//
// Specify the name of the random number generator to be used.  See the MET
// Users Guide for a list of possible random number generators.
//
boot_rng = "mt19937";

//
// Specify the seed value to be used when computing bootstrap confidence
// intervals.  If left unspecified, the seed will change for each run and
// the computed bootstrap confidence intervals will not be reproducible.
//
boot_seed = "";

//
// Specify a comma-separated list of interpolation method(s) to be used
// for smoothing the forecast grid prior to comparing it to the observation
// grid.  The value at each forecast grid point is replaced by the measure
// computed over the neighborhood defined around the grid point.
// String values are interpreted as follows:
//    MIN     = Minimum in the neighborhood
//    MAX     = Maximum in the neighborhood
//    MEDIAN  = Median in the neighborhood
//    UW_MEAN = Unweighted mean in the neighborhood
//
//    NOTE: The distance-weighted mean (DW_MEAN) is not an option here since
//          it will have no effect on a gridded field.
//
//    NOTE: The least-squares fit (LS_FIT) is not an option here since
//          it reduces to an unweighted mean on a grid.
//
// e.g. interp_method[] = [ "UW_MEAN", "MEDIAN" ];
//
interp_method[] = [ "UW_MEAN" ];

//
// Specify a comma-separated list of box widths to be used by the
// interpolation techniques listed above.  A value of 1 indicates that
// no smoothing should be performed.  For a value of n greater than 1,
// the n*n grid points around each point will be used to smooth
// the forecast field.
//
// e.g. interp_width[] = [ 1, 3, 5 ];
//
interp_width[] = [ 1 ];
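
//
// For example (illustrative), to score the raw forecast along with 3x3 and
// 5x5 unweighted-mean smoothings of it:
//    interp_method[] = [ "UW_MEAN" ];
//    interp_width[]  = [ 1, 3, 5 ];
//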

//
// When smoothing, compute a ratio of the number of valid data points to
// the total number of points in the neighborhood.  If that ratio is less
// than this threshold, do not compute a smoothed forecast value.  This
// threshold must be between 0 and 1.  Setting this threshold to 1 will
// require that each observation be surrounded by n*n valid forecast
// points.
//
// e.g. interp_thresh = 1.0;
//
interp_thresh = 1.0;

//
// Specify a comma-separated list of box widths to be used to define
// the neighborhood size for the neighborhood verification methods.
// For a value of n greater than 1, the n*n grid points around each point
// will be used to define the neighborhood.
//
// e.g. nbr_width[] = [ 3, 5 ];
//
nbr_width[] = [ 3, 5 ];

//
// When applying the neighborhood verification methods, compute a ratio
// of the number of valid data points to the total number of points in
// the neighborhood.  If that ratio is less than this threshold, do not
// include it in the computations.  This threshold must be between 0
// and 1.  Setting this threshold to 1 will require that each point be
// surrounded by n*n valid forecast points.
//
// e.g. nbr_thresh = 1.0;
//
nbr_thresh = 1.0;

//
// When applying the neighborhood verification methods, apply a threshold
// to the fractional coverage values to define contingency tables from
// which to compute statistics.
//
// e.g. cov_thresh[] = [ "ge0.25", "ge0.50" ];
//
cov_thresh[] = [ "ge0.5" ];

//
// Specify flags to indicate the type of data to be output:
//    (1) STAT and FHO Text Files, Forecast, Hit, Observation Rates:
//           Total (TOTAL),
//           Forecast Rate (F_RATE),
//           Hit Rate (H_RATE),
//           Observation Rate (O_RATE)
//
//    (2) STAT and CTC Text Files, Contingency Table Counts:
//           Total (TOTAL),
//           Forecast Yes and Observation Yes Count (FY_OY),
//           Forecast Yes and Observation No Count (FY_ON),
//           Forecast No and Observation Yes Count (FN_OY),
//           Forecast No and Observation No Count (FN_ON)
//
//    (3) STAT and CTS Text Files, Contingency Table Scores:
//           Total (TOTAL),
//           Base Rate (BASER), BASER_CL, BASER_CU,
//           Forecast Mean (FMEAN), FMEAN_CL, FMEAN_CU,
//           Accuracy (ACC), ACC_CL, ACC_CU,
//           Bias (BIAS),
//           Probability of Detecting Yes (PODY), PODY_CL, PODY_CU,
//           Probability of Detecting No (PODN), PODN_CL, PODN_CU,
//           Probability of False Detection (POFD), POFD_CL, POFD_CU,
//           False Alarm Ratio (FAR), FAR_CL, FAR_CU,
//           Critical Success Index (CSI), CSI_CL, CSI_CU,
//           Gilbert Skill Score (GSS),
//           Hanssen and Kuipers Discriminant (HK), HK_CL, HK_CU,
//           Heidke Skill Score (HSS),
//           Odds Ratio (ODDS), ODDS_CL, ODDS_CU
//
//    (4) STAT and CNT Text Files, Statistics of Continuous Variables:
//           Total (TOTAL),
//           Forecast Mean (FBAR), FBAR_CL, FBAR_CU,
//           Forecast Standard Deviation (FSTDEV), FSTDEV_CL, FSTDEV_CU
//           Observation Mean (OBAR), OBAR_CL, OBAR_CU,
//           Observation Standard Deviation (OSTDEV), OSTDEV_CL, OSTDEV_CU,
//           Pearson's Correlation Coefficient (PR_CORR), PR_CORR_CL, PR_CORR_CU,
//           Spearman's Rank Correlation Coefficient (SP_CORR),
//           Kendall Tau Rank Correlation Coefficient (KT_CORR),
//           Number of ranks compared (RANKS),
//           Number of tied ranks in the forecast field (FRANK_TIES),
//           Number of tied ranks in the observation field (ORANK_TIES),
//           Mean Error (ME), ME_CL, ME_CU,
//           Standard Deviation of the Error (ESTDEV), ESTDEV_CL, ESTDEV_CU,
//           Frequency Bias (FBIAS),
//           Mean Absolute Error (MAE),
//           Mean Squared Error (MSE),
//           Bias-Corrected Mean Squared Error (BCMSE),
//           Root Mean Squared Error (RMSE),
//           Percentiles of the Error (E10, E25, E50, E75, E90)
//
//           NOTE: CL and CU values define lower and upper
//                 confidence interval limits.
//
//    (5) STAT and SL1L2 Text Files, Scalar Partial Sums:
//           Total (TOTAL),
//           Forecast Mean (FBAR),
//              = mean(f)
//           Observation Mean (OBAR),
//              = mean(o)
//           Forecast*Observation Product Mean (FOBAR),
//              = mean(f*o)
//           Forecast Squared Mean (FFBAR),
//              = mean(f^2)
//           Observation Squared Mean (OOBAR)
//              = mean(o^2)
//
//    (6) STAT and VL1L2 Text Files, Vector Partial Sums:
//           Total (TOTAL),
//           U-Forecast Mean (UFBAR),
//              = mean(uf)
//           V-Forecast Mean (VFBAR),
//              = mean(vf)
//           U-Observation Mean (UOBAR),
//              = mean(uo)
//           V-Observation Mean (VOBAR),
//              = mean(vo)
//           U-Product Plus V-Product (UVFOBAR),
//              = mean(uf*uo+vf*vo)
//           U-Forecast Squared Plus V-Forecast Squared (UVFFBAR),
//              = mean(uf^2+vf^2)
//           U-Observation Squared Plus V-Observation Squared (UVOOBAR)
//              = mean(uo^2+vo^2)
//
//    (7) STAT and PCT Text Files, Nx2 Probability Contingency Table Counts:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Row Observation Yes Count (OY_i),
//           Row Observation No Count (ON_i),
//           NOTE: Previous 3 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//    (8) STAT and PSTD Text Files, Nx2 Probability Contingency Table Scores:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Reliability (RELIABILITY),
//           Resolution (RESOLUTION),
//           Uncertainty (UNCERTAINTY),
//           Area Under the ROC Curve (ROC_AUC),
//           Brier Score (BRIER), BRIER_NCL, BRIER_NCU,
//           Probability Threshold Value (THRESH_i)
//           NOTE: Previous column repeated for each probability threshold
//
//    (9) STAT and PJC Text Files, Joint/Continuous Statistics of
//                                 Probabilistic Variables:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Observation Yes Count Divided by Total (OY_TP_i),
//           Observation No Count Divided by Total (ON_TP_i),
//           Calibration (CALIBRATION_i),
//           Refinement (REFINEMENT_i),
//           Likelihood (LIKELIHOOD_i),
//           Base Rate (BASER_i),
//           NOTE: Previous 7 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//   (10) STAT and PRC Text Files, ROC Curve Points for
//                                 Probabilistic Variables:
//           Total (TOTAL),
//           Number of Forecast Probability Thresholds (N_THRESH),
//           Probability Threshold Value (THRESH_i),
//           Probability of Detecting Yes (PODY_i),
//           Probability of False Detection (POFD_i),
//           NOTE: Previous 3 columns repeated for each row in the table
//           Last Probability Threshold Value (THRESH_n)
//
//   (11) STAT and NBRCTC Text Files, Neighborhood Methods Contingency Table Counts:
//           Total (TOTAL),
//           Forecast Yes and Observation Yes Count (FY_OY),
//           Forecast Yes and Observation No Count (FY_ON),
//           Forecast No and Observation Yes Count (FN_OY),
//           Forecast No and Observation No Count (FN_ON),
//           Fractional Threshold Value (FRAC_T),
//           Neighborhood Size (INTERP_PNTS)
//
//   (12) STAT and NBRCTS Text Files, Neighborhood Methods Contingency Table Scores:
//           Total (TOTAL),
//           Base Rate (BASER), BASER_CL, BASER_CU,
//           Forecast Mean (FMEAN), FMEAN_CL, FMEAN_CU,
//           Accuracy (ACC), ACC_CL, ACC_CU,
//           Bias (BIAS),
//           Probability of Detecting Yes (PODY), PODY_CL, PODY_CU,
//           Probability of Detecting No (PODN), PODN_CL, PODN_CU,
//           Probability of False Detection (POFD), POFD_CL, POFD_CU,
//           False Alarm Ratio (FAR), FAR_CL, FAR_CU,
//           Critical Success Index (CSI), CSI_CL, CSI_CU,
//           Gilbert Skill Score (GSS),
//           Hanssen and Kuipers Discriminant (HK), HK_CL, HK_CU,
//           Heidke Skill Score (HSS),
//           Odds Ratio (ODDS), ODDS_CL, ODDS_CU
//           Fractional Threshold Value (FRAC_T),
//           Neighborhood Size (INTERP_PNTS)
//
//   (13) STAT and NBRCNT Text Files, Neighborhood Methods Continuous Scores:
//           Total (TOTAL),
//           Fractions Brier Score (FBS),
//           Fractions Skill Score (FSS),
//           Neighborhood Size (INTERP_PNTS)
//
//   (14) NetCDF File containing difference fields for each grib
//        code/mask combination.  A non-zero value indicates that
//        this NetCDF file should be produced.  A value of 0
//        indicates that it should not be produced.
//
// Values for flags (1) through (13) are interpreted as follows:
//    (0) Do not generate output of this type
//    (1) Write output to a STAT file
//    (2) Write output to a STAT file and a text file
//
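// For example (illustrative), to write FHO and CTC output to both STAT and
// text files, CTS output to the STAT file only, and to generate the NetCDF
// difference file:
//    output_flag[] = [ 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ];
//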
output_flag[] = [ 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ];

//
// Flag to indicate whether Kendall's Tau and Spearman's Rank Correlation
// Coefficients should be computed.  Computing them over large datasets is
// computationally intensive and can slow down execution significantly.
//    (0) Do not compute these correlation coefficients
//    (1) Compute these correlation coefficients
//
rank_corr_flag = 1;

//
// Specify the GRIB Table 2 parameter table version number to be used
// for interpreting GRIB codes.
// http://www.nco.ncep.noaa.gov/pmb/docs/on388/table2.html
//
grib_ptv = 2;

//
// Directory where temporary files should be written.
//
tmp_dir = "/home/esukovich/DATA/QPE/tmp";

//
// Prefix to be used for the output file names.
//
output_prefix = "";

//
// Indicate a version number for the contents of this configuration file.
// The value should generally not be modified.
//
version = "V2.0";
