diff --git a/2_SWSF_p1of5_Settings_v51.R b/2_SWSF_p1of5_Settings_v51.R index 474732b4..4504e14d 100644 --- a/2_SWSF_p1of5_Settings_v51.R +++ b/2_SWSF_p1of5_Settings_v51.R @@ -120,7 +120,7 @@ checkCompleteness <- FALSE # check linked BLAS library before simulation runs check.blas <- FALSE -#---Load functions +#---Load functions (don't forget the C functions!) rSWSF <- file.path(dir.code, "2_SWSF_p5of5_Functions_v51.RData") if (!file.exists(rSWSF) || !continueAfterAbort) { sys.source(sub(".RData", ".R", rSWSF), envir = attach(NULL, name = "swsf_funs")) @@ -183,16 +183,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories - #CMIP3 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, 
climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 0, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 0, @@ -213,6 +205,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_SageSeer_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -246,7 +252,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 
20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -262,21 +268,21 @@ climate.conditions <- c(climate.ambient) #Will be applied to each climate.conditions downscaling.method <- c("hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 2010), or "hybrid-delta-3mod" -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 
2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 
2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." + # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #------Names of files that contain input data or treatment codes @@ -316,8 +322,6 @@ accountNSHemispheres_veg <- TRUE #if TRUE and latitude < 0 (i.e., southern hemi Index_RunInformation <- NULL #indices of columns of 'SWRunInformation', e.g, c(3, 7:9), or NULL, used for outputting SoilWat-run information in addition to create_treatments and climate scenario #------Select aggregated output: time scale and variable groups -#simulation_timescales is at least one of c("daily", "weekly", "monthly", "yearly") -simulation_timescales <- c("daily", "monthly", "yearly") # functions to aggregate output across years # don't delete names, only set \code{TRUE}/\code{FALSE} agg_funs <- list( @@ -382,6 +386,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 0, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 0, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 0, "dailyWetDegreeDays", 1, "dailyThermalDrynessStartEnd", 1, "dailyThermalSWPConditionCount", 1, diff --git a/DESCRIPTION b/DESCRIPTION index e488dbd4..e1c232de 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,10 +1,10 @@ Package: rSWSF Title: SOILWAT Simulation Framework -Version: 1.6.3 -Date: 2016-09-28 +Version: 1.8.2 +Date: 2016-10-09 Authors@R: c(person("Daniel", "Schlaepfer", email = "daniel.schlaepfer@unibas.ch", role = c("aut", "cre")), - person("Caitlin", "Andrews", role = "ctb", email = "not@available.com"), - person("Ryan", "Murphy", role = "ctb", email = "not@available.com")) + person("Caitlin", "Andrews", role = "ctb"), + person("Ryan", "Murphy", role = "ctb")) Description: An R wrapper
for simulation experiments with SOILWAT Depends: R (>= 3.1.0) @@ -13,6 +13,7 @@ Imports: RSQLite (>= 1.0.0), DBI (>= 0.4.1), circular (>= 0.4.7), + Rcpp (>= 0.12.7), SPEI (>= 1.6), testthat Suggests: @@ -34,6 +35,7 @@ Suggests: Hmisc (>= 3.17.4), qmap (>= 1.0.4), DaymetR (>= 0.1) +LinkingTo: Rcpp LazyData: true ByteCompile: true License: GPL (>= 3) diff --git a/NAMESPACE b/NAMESPACE index 6ae92683..cac1169d 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,2 +1,5 @@ +useDynLib(rSWSF) +importFrom(Rcpp, sourceCpp) + # Generated by roxygen2: do not edit by hand diff --git a/R/2_SWSF_p2of5_CreateDB_Tables_v51.R b/R/2_SWSF_p2of5_CreateDB_Tables_v51.R index 94178347..0f0441ad 100644 --- a/R/2_SWSF_p2of5_CreateDB_Tables_v51.R +++ b/R/2_SWSF_p2of5_CreateDB_Tables_v51.R @@ -151,7 +151,7 @@ PRAGMA_settings2 <- c(PRAGMA_settings1, "PRAGMA max_page_count=2147483646;", # returns the maximum page count "PRAGMA foreign_keys = ON;") #no return value -if(length(Tables) == 0) set_PRAGMAs(con, PRAGMA_settings2) +if (length(Tables) == 0) set_PRAGMAs(con, PRAGMA_settings2) headerTables <- c("runs", "sqlite_sequence", "header", "run_labels", "scenario_labels", "sites", "experimental_labels", "treatments", "simulation_years", "weatherfolders", "aggregating_functions", "aggregating_timewindows", @@ -230,7 +230,7 @@ if (length(Tables) == 0 || cleanDB) { ##########Create table experimental_labels only if using experimentals - if(useExperimentals) { + if (useExperimentals) { RSQLite::dbGetQuery(con, "CREATE TABLE experimental_labels(id INTEGER PRIMARY KEY AUTOINCREMENT, label TEXT UNIQUE NOT NULL);") RSQLite::dbBegin(con) RSQLite::dbGetPreparedQuery(con, "INSERT INTO experimental_labels VALUES(NULL, :label);", @@ -241,23 +241,23 @@ if (length(Tables) == 0 || cleanDB) { #If LookupWeatherFolder is ON we need to make sure all of the weather folders are in weatherfolders table #TODO: WeatherFolder update - if(any(create_treatments=="LookupWeatherFolder")) { + if 
(any(create_treatments=="LookupWeatherFolder")) { #which ones are not in SWRunInformation$WeatherFolder #make a combined list of experimentals and treatments LookupWeatherFolder List #first add any from the experimentals table if its turned on #next add any from the treatments table if its turned on treatments_lookupweatherfolders <- character(0) - if(any(names(sw_input_treatments_use[sw_input_treatments_use])=="LookupWeatherFolder")) { + if (any(names(sw_input_treatments_use[sw_input_treatments_use])=="LookupWeatherFolder")) { treatments_lookupweatherfolders <- c(treatments_lookupweatherfolders, sw_input_treatments$LookupWeatherFolder[runIDs_sites]) } - if(any(create_experimentals=="LookupWeatherFolder")) { + if (any(create_experimentals=="LookupWeatherFolder")) { treatments_lookupweatherfolders <- c(treatments_lookupweatherfolders, sw_input_experimentals$LookupWeatherFolder[runIDs_sites]) } #Remove NA because that defaults to sites default weatherFolder also make sure each folder is unique treatments_lookupweatherfolders <- treatments_lookupweatherfolders[!is.na(treatments_lookupweatherfolders)] treatments_lookupweatherfolders <- unique(treatments_lookupweatherfolders) - if(length(treatments_lookupweatherfolders) == 0){ + if (length(treatments_lookupweatherfolders) == 0) { print("LookupWeatherFolder is turned on in treatments or experimentals or both but is not used") } else { #make a temp data.frame of a column NA's and a column of folder names @@ -265,7 +265,7 @@ if (length(Tables) == 0 || cleanDB) { #Get the id from sites table if the folder is in it LookupWeatherFolder_index$id <- getSiteIds(con, LookupWeatherFolder_index$folder) #if there are any NA's we need to add those to the weatherfolder db table and update its id in our lookuptable for weatherfolder - if(any(is.na(LookupWeatherFolder_index$id))) { + if (any(is.na(LookupWeatherFolder_index$id))) { #get max id from weatherfolders table temp<-is.na(LookupWeatherFolder_index$id) weatherfolders_index <- 
as.numeric(RSQLite::dbGetQuery(con,"SELECT MAX(id) FROM weatherfolders;"))+1 @@ -279,20 +279,20 @@ if (length(Tables) == 0 || cleanDB) { } # get unique rows from both treatments and experimentals - if(useExperimentals) {#Only use experimentals if there is something in it + if (useExperimentals) {#Only use experimentals if there is something in it #Are all the columns NA - if(all(temp<-is.na(sw_input_experimentals[,create_experimentals]))) stop("All Columns in experimentals table are NA") - if(any(apply(temp,MARGIN=2, function(x) all(x)))) warning("One ore more columns in experimentals table are turned on with no values or only with NA.") + if (all(temp<-is.na(sw_input_experimentals[,create_experimentals]))) stop("All Columns in experimentals table are NA") + if (any(apply(temp,MARGIN=2, function(x) all(x)))) warning("One ore more columns in experimentals table are turned on with no values or only with NA.") db_experimentals <- unique(sw_input_experimentals[,create_experimentals]) #note experimentals should be unique if we have less rows then the original then lets throw an Error stopifnot(nrow(db_experimentals) == nrow(sw_input_experimentals)) } else { #experimentals does not have any rows. Are any of the create_experimentals turned on - if(length(create_experimentals) > 0 && expN == 0) stop("No rows in experimentals table but columns are turned on") - if(expN > 0 && length(create_experimentals)==0) warning("Rows in experimentals are not being used.") + if (length(create_experimentals) > 0 && expN == 0) stop("No rows in experimentals table but columns are turned on") + if (expN > 0 && length(create_experimentals)==0) warning("Rows in experimentals are not being used.") } - if(useTreatments) { + if (useTreatments) { # Note: invariant to 'include_YN', i.e., do not subset 'SWRunInformation' # we only need the columns that are turned on and not in experimentals. Experimentals over write. 
db_treatments <- unique(df<-sw_input_treatments[, create_treatments[!(create_treatments %in% create_experimentals)], drop=FALSE]) @@ -311,8 +311,8 @@ if (length(Tables) == 0 || cleanDB) { } #Replace the LookupWeatherFolder with the LookupWeatherFolder_id in either db_experimentals or db_treatments - if(any(create_treatments=="LookupWeatherFolder")) { - if(any(create_experimentals=="LookupWeatherFolder")) { + if (any(create_treatments=="LookupWeatherFolder")) { + if (any(create_experimentals=="LookupWeatherFolder")) { #rename the column colnames(db_experimentals)[where(create_experimentals=="LookupWeatherFolder")] <- "LookupWeatherFolder_id" #get the id numbers for those columns and replace text @@ -325,11 +325,11 @@ if (length(Tables) == 0 || cleanDB) { } } useTreatmentWeatherFolder <- FALSE - if(useExperimentals | useTreatments) { + if (useExperimentals | useTreatments) { #Create a table to hold the values going into the database temp_numberRows <- ifelse(useExperimentals,nrow(db_experimentals)*db_treatments_rows,nrow(db_treatments)) temp_numberColumns <- ifelse(useExperimentals,3,2)+length(create_treatments) - temp_columnNames <- c("id",if(useExperimentals) c("experimental_id"),"simulation_years_id",create_treatments) + temp_columnNames <- c("id",if (useExperimentals) c("experimental_id"),"simulation_years_id",create_treatments) db_combined_exp_treatments <- data.frame(matrix(data=NA, nrow=temp_numberRows, ncol=temp_numberColumns,dimnames=list(NULL, temp_columnNames)),stringsAsFactors = FALSE) rm(temp_numberColumns,temp_columnNames,temp_numberRows) @@ -344,9 +344,9 @@ if (length(Tables) == 0 || cleanDB) { ###################### #Get the column types from the proper tables db_treatments_column_types[,2] <- sapply(db_treatments_column_types[,1], function(columnName) { - if(columnName %in% create_experimentals) { + if (columnName %in% create_experimentals) { RSQLite::dbDataType(con, sw_input_experimentals[,columnName]) - } else if(columnName %in% 
create_treatments[!(create_treatments %in% create_experimentals)]) { + } else if (columnName %in% create_treatments[!(create_treatments %in% create_experimentals)]) { RSQLite::dbDataType(con, sw_input_treatments[,columnName]) } }) @@ -354,43 +354,43 @@ if (length(Tables) == 0 || cleanDB) { #Finalize db_treatments_column_types #remove YearStart or YearEnd db_treatments_years <- NULL - if(any(db_treatments_column_types$column == "YearStart")) { + if (any(db_treatments_column_types$column == "YearStart")) { db_treatments_years <- rbind(db_treatments_years, db_treatments_column_types[which(db_treatments_column_types$column == "YearStart"),]) db_treatments_column_types <- db_treatments_column_types[-which(db_treatments_column_types$column == "YearStart"),] } - if(any(db_treatments_column_types$column == "YearEnd")) { + if (any(db_treatments_column_types$column == "YearEnd")) { db_treatments_years <- rbind(db_treatments_years, db_treatments_column_types[which(db_treatments_column_types$column == "YearEnd"),]) db_treatments_column_types <- db_treatments_column_types[-which(db_treatments_column_types$column == "YearEnd"),] } #rename weather folder column name and create the fk fk_LookupWeatherFolder <- "" - if(any(create_treatments=="LookupWeatherFolder")) { + if (any(create_treatments=="LookupWeatherFolder")) { useTreatmentWeatherFolder <- TRUE db_treatments_column_types[which(db_treatments_column_types[,1] == "LookupWeatherFolder"),1:2] <- c("LookupWeatherFolder_id","INTEGER") colnames(db_combined_exp_treatments)[-(1:2)] <- db_treatments_column_types[,1] fk_LookupWeatherFolder <- ", FOREIGN KEY(LookupWeatherFolder_id) REFERENCES weatherfolders(id)" } #Create the table - RSQLite::dbGetQuery(con, paste("CREATE TABLE treatments(id INTEGER PRIMARY KEY AUTOINCREMENT, ",if(useExperimentals) "experimental_id INTEGER,", " simulation_years_id INTEGER, ", paste(db_treatments_column_types[,1], " ", db_treatments_column_types[,2], sep="", collapse =", "), if(useExperimentals || 
fk_LookupWeatherFolder!="") ", ", if(useExperimentals) "FOREIGN KEY(experimental_id) REFERENCES experimental_labels(id)",if(fk_LookupWeatherFolder != "") ", ",fk_LookupWeatherFolder,");", sep="")) + RSQLite::dbGetQuery(con, paste("CREATE TABLE treatments(id INTEGER PRIMARY KEY AUTOINCREMENT, ",if (useExperimentals) "experimental_id INTEGER,", " simulation_years_id INTEGER, ", paste(db_treatments_column_types[,1], " ", db_treatments_column_types[,2], sep="", collapse =", "), if (useExperimentals || fk_LookupWeatherFolder!="") ", ", if (useExperimentals) "FOREIGN KEY(experimental_id) REFERENCES experimental_labels(id)",if (fk_LookupWeatherFolder != "") ", ",fk_LookupWeatherFolder,");", sep="")) #Lets put in the treatments into combined. This will repeat the reduced rows of treatments into combined - if(useTreatments) { + if (useTreatments) { i_start <- which(colnames(db_treatments) == "YearStart") i_end <- which(colnames(db_treatments) == "YearEnd") i_use <- 1:ncol(db_treatments) - if(length(i_start) > 0) i_use <- i_use[-i_start] - if(length(i_end) > 0) i_use <- i_use[-i_end] + if (length(i_start) > 0) i_use <- i_use[-i_start] + if (length(i_end) > 0) i_use <- i_use[-i_end] db_combined_exp_treatments[,db_treatments_column_types[db_treatments_column_types[,3]==0,1]] <- db_treatments[, i_use] #Handle StartYear and EndYear separately - if(length(i_start) > 0 && !is.null(db_treatments_years) && db_treatments_years[db_treatments_years$column == "YearStart", "table"] == 0) db_combined_exp_treatments[, colnames(db_combined_exp_treatments) == "YearStart"] <- db_treatments[, i_start] - if(length(i_end) > 0 && !is.null(db_treatments_years) && db_treatments_years[db_treatments_years$column == "YearEnd", "table"] == 0) db_combined_exp_treatments[, colnames(db_combined_exp_treatments) == "YearEnd"] <- db_treatments[, i_end] + if (length(i_start) > 0 && !is.null(db_treatments_years) && db_treatments_years[db_treatments_years$column == "YearStart", "table"] == 0) 
db_combined_exp_treatments[, colnames(db_combined_exp_treatments) == "YearStart"] <- db_treatments[, i_start] + if (length(i_end) > 0 && !is.null(db_treatments_years) && db_treatments_years[db_treatments_years$column == "YearEnd", "table"] == 0) db_combined_exp_treatments[, colnames(db_combined_exp_treatments) == "YearEnd"] <- db_treatments[, i_end] } - if(useExperimentals) { + if (useExperimentals) { exp_start_rows<-seq(from=1,to=db_treatments_rows*nrow(db_experimentals),by=db_treatments_rows) #Insert data into our new data.frame - for(start in exp_start_rows) { + for (start in exp_start_rows) { #Get experimental_label_id db_combined_exp_treatments[start:(start+db_treatments_rows-1),2] <- which(exp_start_rows==start) #insert all of the rows from experimentals @@ -403,16 +403,16 @@ if (length(Tables) == 0 || cleanDB) { } #if the column startYear or endYear are present move over to simulation_years - if(any(colnames(db_combined_exp_treatments) == "YearStart") || any(colnames(db_combined_exp_treatments) == "YearEnd")) { + if (any(colnames(db_combined_exp_treatments) == "YearStart") || any(colnames(db_combined_exp_treatments) == "YearEnd")) { simulation_years<-matrix(data=NA, nrow=nrow(db_combined_exp_treatments), ncol = 4, dimnames=list(NULL,c("id","simulationStartYear","StartYear","EndYear"))) #Get from treatments or get from settings - if(any(colnames(db_combined_exp_treatments) == "YearStart")) { + if (any(colnames(db_combined_exp_treatments) == "YearStart")) { simulation_years[, "simulationStartYear"] <- db_combined_exp_treatments$YearStart db_combined_exp_treatments <- db_combined_exp_treatments[,-which(colnames(db_combined_exp_treatments) == "YearStart")] } else { simulation_years[, "simulationStartYear"] <- simstartyr } - if(any(colnames(db_combined_exp_treatments) == "YearEnd")) { + if (any(colnames(db_combined_exp_treatments) == "YearEnd")) { simulation_years[, "EndYear"] <- db_combined_exp_treatments$YearEnd db_combined_exp_treatments <- 
db_combined_exp_treatments[,-which(colnames(db_combined_exp_treatments) == "YearEnd")] } else { @@ -422,7 +422,7 @@ if (length(Tables) == 0 || cleanDB) { unique_simulation_years <- unique(simulation_years) #each row is unique so add id to db_combined - if(nrow(unique_simulation_years)==nrow(simulation_years)) { + if (nrow(unique_simulation_years)==nrow(simulation_years)) { id <- seq_len(nrow(unique_simulation_years)) unique_simulation_years<-cbind(id,unique_simulation_years[,2:4]) db_combined_exp_treatments$simulation_years_id <- unique_simulation_years[,1] @@ -464,7 +464,7 @@ if (length(Tables) == 0 || cleanDB) { # Note: invariant to 'include_YN', i.e., do not subset 'SWRunInformation' RSQLite::dbGetQuery(con, "CREATE TABLE run_labels(id INTEGER PRIMARY KEY AUTOINCREMENT, label TEXT UNIQUE NOT NULL);") RSQLite::dbBegin(con) - if(useExperimentals) { + if (useExperimentals) { RSQLite::dbGetPreparedQuery(con, "INSERT INTO run_labels VALUES(NULL, :label);", bind.data = data.frame(label = paste(formatC(SWRunInformation$site_id, width = counter.digitsN, format = "d", flag = "0"), rep(sw_input_experimentals[, 1], each = runsN_master), labels, @@ -517,7 +517,7 @@ if (length(Tables) == 0 || cleanDB) { db_runs$treatment_id <- rep(treatments_unique_map,each=scenario_No) } } else { - if(useExperimentals) { + if (useExperimentals) { i_exp<-as.vector(matrix(data=exp_start_rows,nrow=runsN_master,ncol=expN,byrow=T)) db_runs$treatment_id <- rep(i_exp,each=scenario_No) } else { @@ -618,30 +618,30 @@ if (length(Tables) == 0 || cleanDB) { } #4. - if (any(simulation_timescales == "monthly") && aon$input_Phenology) { + if (aon$input_Phenology) { temp <- c(temp, paste0("SWinput.GrowingSeason.", c("Start", "End"), "_month_const")) } #5. 
- if(aon$input_TranspirationCoeff){ - if(daily_lyr_agg[["do"]]){ + if (aon$input_TranspirationCoeff) { + if (daily_lyr_agg[["do"]]) { ltemp <- paste("L0to", daily_lyr_agg[["first_cm"]], "cm", sep="") - if(is.null(daily_lyr_agg[["second_cm"]])) { + if (is.null(daily_lyr_agg[["second_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["first_cm"]], "toSoilDepth", sep="")) - } else if(is.numeric(daily_lyr_agg[["second_cm"]])){ + } else if (is.numeric(daily_lyr_agg[["second_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["first_cm"]], "to", daily_lyr_agg[["second_cm"]], "cm", sep="")) } - if(is.null(daily_lyr_agg[["third_cm"]])) { + if (is.null(daily_lyr_agg[["third_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["second_cm"]], "toSoilDepth", sep="")) - } else if(is.na(daily_lyr_agg[["third_cm"]])){ - } else if(is.numeric(daily_lyr_agg[["third_cm"]])){ + } else if (is.na(daily_lyr_agg[["third_cm"]])) { + } else if (is.numeric(daily_lyr_agg[["third_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["second_cm"]], "to", daily_lyr_agg[["third_cm"]], "cm", sep="")) } - if(is.null(daily_lyr_agg[["fourth_cm"]])) { + if (is.null(daily_lyr_agg[["fourth_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["third_cm"]], "toSoilDepth", sep="")) - } else if(is.na(daily_lyr_agg[["fourth_cm"]])){ - } else if(is.numeric(daily_lyr_agg[["fourth_cm"]])){ + } else if (is.na(daily_lyr_agg[["fourth_cm"]])) { + } else if (is.numeric(daily_lyr_agg[["fourth_cm"]])) { ltemp <- c(ltemp, paste("L", daily_lyr_agg[["third_cm"]], "to", daily_lyr_agg[["fourth_cm"]], "cm", sep="")) } ltemp <- c(ltemp, paste("NA", (length(ltemp)+1):SoilLayer_MaxNo, sep="")) @@ -664,7 +664,7 @@ if (length(Tables) == 0 || cleanDB) { } #6. 
- if(aon$input_ClimatePerturbations) { + if (aon$input_ClimatePerturbations) { temp <- c(temp, paste0(rep(paste0("SWinput.ClimatePerturbations.", c("PrcpMultiplier.m", "TmaxAddand.m", "TminAddand.m")), each = 12), @@ -676,41 +676,41 @@ if (length(Tables) == 0 || cleanDB) { ##############################################################---Aggregation: Climate and weather---############################################################## #7. - if(any(simulation_timescales=="yearly") & aon$yearlyTemp){ + if (aon$yearlyTemp) { temp <- c(temp, "MAT_C") } #8. - if(any(simulation_timescales=="yearly") & aon$yearlyPPT){ + if (aon$yearlyPPT) { temp <- c(temp, c("MAP_mm", "SnowOfPPT_fraction")) } #9. - if(any(simulation_timescales=="daily") & any(simulation_timescales=="yearly") & aon$dailySnowpack){ + if (aon$dailySnowpack) { temp <- c(temp, "RainOnSnowOfMAP_fraction") } #10. - if(any(simulation_timescales=="daily") & aon$dailySnowpack){ + if (aon$dailySnowpack) { temp <- c(temp, paste0("Snowcover.NSadj.", c("Peak_doy", "LongestContinuous.LastDay_doy", "LongestContinuous.Duration_days", "Total_days", "Peak_mmSWE"))) } #11 - if(any(simulation_timescales=="daily") & aon$dailyFrostInSnowfreePeriod){ + if (aon$dailyFrostInSnowfreePeriod) { temp <- c(temp, paste0("TminBelow", fieldtag_Tmin_crit_C, "withoutSnowpack_days")) } #12 - if(any(simulation_timescales=="daily") & aon$dailyHotDays){ + if (aon$dailyHotDays) { temp <- c(temp, paste0("TmaxAbove", fieldtag_Tmax_crit_C, "_days")) } #12b - if (any(simulation_timescales == "daily") && aon$dailyWarmDays) { + if (aon$dailyWarmDays) { temp <- c(temp, paste0("TmeanAbove", fieldtag_Tmean_crit_C, "_days")) } #13 - if(any(simulation_timescales=="daily") & aon$dailyPrecipitationEventSizeDistribution){ + if (aon$dailyPrecipitationEventSizeDistribution) { bins.summary <- (0:6) * bin.prcpSizes temp <- c(temp, paste0("PrcpEvents.Annual", c("_count", @@ -721,12 +721,12 @@ if (length(Tables) == 0 || cleanDB) { } #15 - 
if(any(simulation_timescales=="yearly") & aon$yearlyPET){ + if (aon$yearlyPET) { temp <- c(temp, "PET_mm") } #16 - if(any(simulation_timescales=="monthly") & aon$monthlySeasonalityIndices){ + if (aon$monthlySeasonalityIndices) { temp <- c(temp, paste0("Seasonality.monthly", c("PETandSWPtopLayers", "PETandSWPbottomLayers", "TandPPT"), "_PearsonCor_mean")) @@ -735,7 +735,7 @@ if (length(Tables) == 0 || cleanDB) { #---Aggregation: Climatic dryness #17 - if(any(simulation_timescales=="yearly") & any(simulation_timescales=="monthly") & aon$yearlymonthlyTemperateDrylandIndices){ + if (aon$yearlymonthlyTemperateDrylandIndices) { temp2 <- c("UNAridityIndex", "TrewarthaD", "TemperateDryland12") temp <- c(temp, paste0(c(paste0(temp2, ".Normals"), paste0(temp2, ".Annual")), @@ -744,14 +744,14 @@ if (length(Tables) == 0 || cleanDB) { } #18 - if(any(simulation_timescales=="yearly") & aon$yearlyDryWetPeriods){ + if (aon$yearlyDryWetPeriods) { temp <- c(temp, paste0("SpellsOfYears_", c("Below", "Above"), "MeanAnnualPrecip_Duration_years")) } #19 - if(any(simulation_timescales=="daily") & aon$dailyWeatherGeneratorCharacteristics){ + if (aon$dailyWeatherGeneratorCharacteristics) { temp2 <- c("WetSpellDuration", "DrySpellDuration", "TempAir.StDevOfDailyValues") temp <- c(temp, paste0(rep(temp2, each = 12), ".m", st_mo, "_", @@ -759,7 +759,7 @@ if (length(Tables) == 0 || cleanDB) { } #20 - if(any(simulation_timescales=="daily") & aon$dailyPrecipitationFreeEventDistribution){ + if (aon$dailyPrecipitationFreeEventDistribution) { bins.summary <- (0:3) * bin.prcpfreeDurations temp <- c(temp, paste0("DrySpells.Annual", c("_count", @@ -770,7 +770,7 @@ if (length(Tables) == 0 || cleanDB) { } #21 - if(any(simulation_timescales=="monthly") & aon$monthlySPEIEvents){ + if (aon$monthlySPEIEvents) { temp <- c(temp, paste0(paste0("SPEI.", rep(SPEI_tscales_months, each = 4), "monthsScale.", "Spell", rep(c("Pos.", "Neg."), each = 2)), @@ -779,14 +779,14 @@ if (length(Tables) == 0 || cleanDB) { 
#---Aggregation: Climatic control #22 - if(any(simulation_timescales=="monthly") & aon$monthlyPlantGrowthControls){ + if (aon$monthlyPlantGrowthControls) { temp <- c(temp, paste0("NemaniEtAl2003.NPPControl.", c("Temperature", "Water", "Radiation"), "_fraction")) } #23 - if(any(simulation_timescales=="daily") & aon$dailyC4_TempVar){ + if (aon$dailyC4_TempVar) { temp <- c(temp, paste0("TeeriEtAl1976.NSadj.", c("TempAirMin.7thMonth_C", "FreezeFreeGrowingPeriod_days", @@ -794,19 +794,19 @@ if (length(Tables) == 0 || cleanDB) { } #24 - if(any(simulation_timescales=="daily") & aon$dailyDegreeDays){ + if (aon$dailyDegreeDays) { temp <- c(temp, paste0("DegreeDays.Base", DegreeDayBase, "C.dailyTmean_Cdays")) } ##############################################################---Aggregation: Yearly water balance---############################################################## #27.0 - if(any(simulation_timescales=="yearly") & aon$yearlyAET){ + if (aon$yearlyAET) { temp <- c(temp, "AET_mm") } #27 - if(any(simulation_timescales=="yearly") & aon$yearlyWaterBalanceFluxes) { + if (aon$yearlyWaterBalanceFluxes) { temp <- c(temp, c("Rain_mm", "Rain.ReachingSoil_mm", "Snowfall_mm", "Snowmelt_mm", "Snowloss_mm", "Interception.Total_mm", "Interception.Vegetation_mm", "Interception.Litter_mm", @@ -823,7 +823,7 @@ if (length(Tables) == 0 || cleanDB) { #27.2 - if(any(simulation_timescales=="daily") & aon$dailySoilWaterPulseVsStorage){ + if (aon$dailySoilWaterPulseVsStorage) { temp <- c(temp, paste0("WaterExtractionSpell_MeanContinuousDuration_L", lmax, "_days"), paste0("WaterExtractionSpell_AnnualSummedExtraction_L", lmax, "_mm")) @@ -831,44 +831,44 @@ if (length(Tables) == 0 || cleanDB) { ##############################################################---Aggregation: Daily extreme values---############################################################## #28 - if(any(simulation_timescales=="daily") & aon$dailyTranspirationExtremes) { + if (aon$dailyTranspirationExtremes) { temp <- c(temp, 
paste0("Transpiration.", c("DailyMax", "DailyMin"), "_mm"), paste0("Transpiration.", c("DailyMax", "DailyMin"), "_doy")) } #29 - if(any(simulation_timescales=="daily") & aon$dailyTotalEvaporationExtremes) { + if (aon$dailyTotalEvaporationExtremes) { temp <- c(temp, paste0("Evaporation.Total.", c("DailyMax", "DailyMin"), "_mm"), paste0("Evaporation.Total.", c("DailyMax", "DailyMin"), "_doy")) } #30 - if(any(simulation_timescales=="daily") & aon$dailyDrainageExtremes) { + if (aon$dailyDrainageExtremes) { temp <- c(temp, paste0("DeepDrainage.", c("DailyMax", "DailyMin"), "_mm"), paste0("DeepDrainage.", c("DailyMax", "DailyMin"), "_doy")) } #31 - if(any(simulation_timescales=="daily") & aon$dailyInfiltrationExtremes) { + if (aon$dailyInfiltrationExtremes) { temp <- c(temp, paste0("Infiltration.", c("DailyMax", "DailyMin"), "_mm"), paste0("Infiltration.", c("DailyMax", "DailyMin"), "_doy")) } #32 - if(any(simulation_timescales=="daily") & aon$dailyAETExtremes) { + if (aon$dailyAETExtremes) { temp <- c(temp, paste0("AET.", c("DailyMax", "DailyMin"), "_mm"), paste0("AET.", c("DailyMax", "DailyMin"), "_doy")) } #33 - if(any(simulation_timescales=="daily") & aon$dailySWPextremes){ + if (aon$dailySWPextremes) { temp <- c(temp, paste0("SWP.", rep(c("topLayers.", "bottomLayers."), each = 2), rep(c("DailyMax", "DailyMin"), times = 2), rep(c("_MPa", "_doy"), each = 4))) } #34 - if(any(simulation_timescales=="daily") & aon$dailyRechargeExtremes){ + if (aon$dailyRechargeExtremes) { temp <- c(temp, paste0("RelRecharge.", rep(c("topLayers.", "bottomLayers."), each = 2), rep(c("DailyMax", "DailyMin"), times = 2), @@ -878,7 +878,7 @@ if (length(Tables) == 0 || cleanDB) { ##############################################################---Aggregation: Ecological dryness---############################################################## #35a -if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureRegimes){ + if (aon$dailyNRCS_SoilMoistureTemperatureRegimes) { # 
abbreviations: # - GT = greater than; LT = less than; EQ = equal # - MCS = MoistureControlSection; ACS = AnhydrousControlSection @@ -917,15 +917,21 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR c("Anhydrous", "Aridic", "Udic", "Ustic", "Xeric")))) } #35b - if(any(simulation_timescales=="daily") && aon$dailyNRCS_Chambers2014_ResilienceResistance && aon$dailyNRCS_SoilMoistureTemperatureRegimes){ - cats <- c("Low", "ModeratelyLow", "Moderate", "ModeratelyHigh", "High") - temp <- c(temp, paste0("NRCS_Sagebrush", + if (aon$dailyNRCS_Chambers2014_ResilienceResistance) { + cats <- c("Low", "ModeratelyLow", "Moderate", "ModeratelyHigh", "High") + temp <- c(temp, paste0("NRCS_Chambers2014_Sagebrush", rep(c("Resilience", "Resistance"), each = length(cats)), "_", cats)) - rm(cats) - } + rm(cats) + } + + #35c + if (aon$dailyNRCS_Maestas2016_ResilienceResistance) { + temp <- c(temp, paste0("NRCS_Maestas2016_SagebrushRR_", c("Low", "Moderate", "High"))) + } + #35.2 - if(any(simulation_timescales=="daily") & aon$dailyWetDegreeDays){ + if (aon$dailyWetDegreeDays) { temp <- c(temp, paste0("WetDegreeDays.SWPcrit", rep(fieldtag_SWPcrit_MPa, times = 3), rep(c(".topLayers", ".bottomLayers", ".anyLayer"), @@ -933,7 +939,7 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #35.3 - if(any(simulation_timescales=="daily") && aon$dailyThermalDrynessStartEnd){ + if (aon$dailyThermalDrynessStartEnd) { temp <- c(temp, paste0("ThermalDrySoilPeriods_SWPcrit", rep(fieldtag_SWPcrit_MPa, each = 2), "_NSadj_", @@ -944,7 +950,7 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #35.4 - if(any(simulation_timescales=="daily") && aon$dailyThermalSWPConditionCount){ + if (aon$dailyThermalSWPConditionCount) { temp <- c(temp, paste0("SoilPeriods_Warm", rep(paste0(rep(c("Dry", "Wet"), times = 3), "_", rep(c("allLayers", "topLayer", "bottomLayer"), each = 2)), @@ -956,33 +962,33 @@ 
if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR #TODO(drs): progress state #36 - if(any(simulation_timescales=="monthly") & aon$monthlySWPdryness){ + if (aon$monthlySWPdryness) { temp <- c(temp, paste("DrySoilPeriods.SWPcrit", rep(fieldtag_SWPcrit_MPa, times=2), ".NSadj.", rep(c("topLayers", "bottomLayers"), each=length(SWPcrit_MPa)), ".Duration.Total_months_mean", sep=""), paste("DrySoilPeriods.SWPcrit", rep(fieldtag_SWPcrit_MPa, times=2), ".NSadj.", rep(c("topLayers", "bottomLayers"), each=length(SWPcrit_MPa)), ".Start_month_mean", sep="")) } #37 - if(any(simulation_timescales=="daily") & aon$dailySWPdrynessANDwetness){ + if (aon$dailySWPdrynessANDwetness) { temp <- c(temp, paste(rep(c("WetSoilPeriods", "DrySoilPeriods"), each=8), ".SWPcrit", rep(fieldtag_SWPcrit_MPa, each=16), ".NSadj.", c(rep(c("topLayers", "bottomLayers"), times=4), rep(rep(c("topLayers", "bottomLayers"), each=2), times=2)), rep(c(".AnyLayerWet.", ".AllLayersWet.", ".AllLayersDry.", ""), each=4), c(rep(rep(c("Duration.Total_days", "Duration.LongestContinuous_days"), each=2), times=2), rep(c("Duration.Total_days", "Duration.LongestContinuous_days"), times=2), rep(c(".PeriodsForAtLeast10Days.Start_doy", ".PeriodsForAtLeast10Days.End_doy"), times=2)), "_mean", sep="")) } #38 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsDuration){ + if (aon$dailySuitablePeriodsDuration) { quantiles <- c(0.05, 0.5, 0.95) temp <- c(temp, paste("ThermalSnowfreeWetPeriods.SWPcrit", rep(paste(rep(fieldtag_SWPcrit_MPa, each=2), rep(c(".topLayers", ".bottomLayers"), times=length(SWPcrit_MPa)), sep=""), each=length(quantiles)), "_Duration_days_quantile", rep(quantiles, times=2), sep="")) rm(quantiles) } #39 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsAvailableWater){ + if (aon$dailySuitablePeriodsAvailableWater) { temp <- c(temp, paste("ThermalSnowfreeWetPeriods.SWPcrit", rep(fieldtag_SWPcrit_MPa, each=2), rep(c(".topLayers", ".bottomLayers"), 
times=length(SWPcrit_MPa)), "_AvailableWater_mm_mean", sep="")) } #40 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsDrySpells){ + if (aon$dailySuitablePeriodsDrySpells) { temp <- c(temp, paste("ThermalSnowfreeDryPeriods.SWPcrit", rep(paste(rep(fieldtag_SWPcrit_MPa, each=2), rep(c(".topLayers", ".bottomLayers"), times=length(SWPcrit_MPa)), sep=""), each=4), c("_DrySpellsAllLayers_meanDuration_days_mean", "_DrySpellsAllLayers_maxDuration_days_mean", "_DrySpellsAllLayers_Total_days_mean", "_DrySpellsAtLeast10DaysAllLayers_Start_doy_mean"), sep="")) } #41 - if(any(simulation_timescales=="daily") & aon$dailySWPdrynessDurationDistribution){ + if (aon$dailySWPdrynessDurationDistribution) { deciles <- (0:10)*10/100 quantiles <- (0:4)/4 mo_seasons <- matrix(data=c(12,1:11), ncol=3, nrow=4, byrow=TRUE) @@ -1001,7 +1007,7 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #42 - if(any(simulation_timescales=="daily") && aon$dailySWPdrynessEventSizeDistribution) { + if (aon$dailySWPdrynessEventSizeDistribution) { binSize <- c(1, 8, 15, 29, 57, 183, 367) #closed interval lengths in [days] within a year; NOTE: n_variables is set for binsN == 4 binsN <- length(binSize) - 1 binTitle <- paste("SizeClass", paste(binSize[-length(binSize)], binSize[-1]-1, sep="to") ,"days", sep="") @@ -1017,7 +1023,7 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #43 - if(any(simulation_timescales=="daily") && aon$dailySWPdrynessIntensity) { + if (aon$dailySWPdrynessIntensity) { temp <- c(temp, paste0("DrySoilPeriods.SWPcrit", rep(fieldtag_SWPcrit_MPa, each = 4 * 2), ".MissingWater.", @@ -1027,7 +1033,7 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #43.2 - if(any(simulation_timescales=="daily") && aon$dailyThermalDrynessStress){ + if (aon$dailyThermalDrynessStress) { temp <- c(temp, paste0("Mean10HottestDays_VPD_kPa", c("_mean", "_max", @@ -1042,78 +1048,78 @@ 
if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR ##############################################################---Aggregation: Mean monthly values---############################################################## #44 - if(any(simulation_timescales=="monthly") & aon$monthlyTemp){ + if (aon$monthlyTemp) { temp <- c(temp, paste0("TempAir.m", st_mo, "_C")) } #45 - if(any(simulation_timescales=="monthly") & aon$monthlyPPT){ + if (aon$monthlyPPT) { temp <- c(temp, paste0("Precip.m", st_mo, "_mm")) } #46 - if(any(simulation_timescales=="monthly") & aon$monthlySnowpack){ + if (aon$monthlySnowpack) { temp <- c(temp, paste0("Snowpack.m", st_mo, "_mmSWE")) } #47 - if(any(simulation_timescales == "monthly") & aon$monthlySoilTemp) { + if (aon$monthlySoilTemp) { temp <- c(temp, paste0("TempSoil.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_C")) } #48 - if(any(simulation_timescales=="monthly") & aon$monthlyRunoff){ + if (aon$monthlyRunoff) { temp <- c(temp, paste0("Runoff.Total.m", st_mo, "_mm")) } #49 - if(any(simulation_timescales=="monthly") & aon$monthlyHydraulicRedistribution){ + if (aon$monthlyHydraulicRedistribution) { temp <- c(temp, paste0("HydraulicRedistribution.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_mm")) } #50 - if(any(simulation_timescales=="monthly") & aon$monthlyInfiltration){ + if (aon$monthlyInfiltration) { temp <- c(temp, paste0("Infiltration.m", st_mo, "_mm")) } #51 - if(any(simulation_timescales=="monthly") & aon$monthlyDeepDrainage){ + if (aon$monthlyDeepDrainage) { temp <- c(temp, paste0("DeepDrainage.m", st_mo, "_mm")) } #52 - if(any(simulation_timescales=="monthly") & aon$monthlySWPmatric){ + if (aon$monthlySWPmatric) { temp <- c(temp, paste0("SWPmatric.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_MPa_FromVWCmean")) } #53 a.) 
- if(any(simulation_timescales=="monthly") & aon$monthlyVWCbulk){ + if (aon$monthlyVWCbulk) { temp <- c(temp, paste0("VWCbulk.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_mPERm")) } #53 b.) - if(any(simulation_timescales=="monthly") & aon$monthlyVWCmatric){ + if (aon$monthlyVWCmatric) { temp <- c(temp, paste0("VWCmatric.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_mPERm")) } #54 - if(any(simulation_timescales=="monthly") & aon$monthlySWCbulk){ + if (aon$monthlySWCbulk) { temp <- c(temp, paste0("SWCbulk.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_mm")) } #55 - if(any(simulation_timescales=="monthly") & aon$monthlySWAbulk){ + if (aon$monthlySWAbulk) { temp <- c(temp, paste0("SWAbulk_", "SWPcrit", rep(fieldtag_SWPcrit_MPa, each = 24), "_", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), @@ -1121,40 +1127,40 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR } #56 - if(any(simulation_timescales=="monthly") & aon$monthlyTranspiration){ + if (aon$monthlyTranspiration) { temp <- c(temp, paste0("Transpiration.", paste0(rep(c("top", "bottom"), each = 12), "Layers.m", st_mo), "_mm")) } #57 - if(any(simulation_timescales=="monthly") & aon$monthlySoilEvaporation){ + if (aon$monthlySoilEvaporation) { temp <- c(temp, paste0("Evaporation.Soil.m", st_mo, "_mm")) } #58 - if(any(simulation_timescales=="monthly") & aon$monthlyAET){ + if (aon$monthlyAET) { temp <- c(temp, paste0("AET.m", st_mo, "_mm")) } #59 - if(any(simulation_timescales=="monthly") & aon$monthlyPET){ + if (aon$monthlyPET) { temp <- c(temp, paste0("PET.m", st_mo, "_mm")) } #59.2 - if (any(simulation_timescales == "monthly") && aon$monthlyVPD) { + if (aon$monthlyVPD) { temp <- c(temp, paste0("VPD_m", st_mo, "_kPa")) } #60 - if(any(simulation_timescales=="monthly") & aon$monthlyAETratios){ + if (aon$monthlyAETratios) { temp <- c(temp, paste0(rep(c("TranspToAET.m", "EvapSoilToAET.m"), each = 12), st_mo, 
"_fraction")) } #61 - if(any(simulation_timescales=="monthly") & aon$monthlyPETratios){ + if (aon$monthlyPETratios) { temp <- c(temp, paste0(rep(c("TranspToPET.m", "EvapSoilToPET.m"), each = 12), st_mo, "_fraction")) } @@ -1162,13 +1168,13 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR ##############################################################---Aggregation: Potential regeneration---############################################################## #62 - if(any(simulation_timescales=="daily") & aon$dailyRegeneration_bySWPSnow) { + if (aon$dailyRegeneration_bySWPSnow) { temp <- c(temp, "Regeneration.Potential.SuitableYears.NSadj_fraction_mean") } #63 - if(any(simulation_timescales=="daily") & aon$dailyRegeneration_GISSM & no.species_regeneration > 0){ - for(sp in 1:no.species_regeneration){ + if (aon$dailyRegeneration_GISSM & no.species_regeneration > 0) { + for (sp in 1:no.species_regeneration) { SeedlingMortality_CausesByYear_colnames <- paste("Seedlings1stSeason.Mortality.", c("UnderneathSnowCover", "ByTmin", "ByTmax", "ByChronicSWPMax", "ByChronicSWPMin", "ByAcuteSWPMin", "DuringStoppedGrowth.DueSnowCover", "DuringStoppedGrowth.DueTmin", "DuringStoppedGrowth.DueTmax"), sep="") @@ -1227,9 +1233,9 @@ if(any(simulation_timescales=="daily") && aon$dailyNRCS_SoilMoistureTemperatureR "PRIMARY KEY (\"P_id\", \"aggfun_id\", \"aggwindow_id\", \"Soil_Layer\")"), collapse = ", ") - if (any(simulation_timescales == "daily") && daily_no > 0) { + if (daily_no > 0) { for (doi in seq_len(daily_no)) { - if(regexpr("SWAbulk", output_aggregate_daily[doi]) > 0){ + if (regexpr("SWAbulk", output_aggregate_daily[doi]) > 0) { agg.resp <- "SWAbulk" #index.SWPcrit <- -as.numeric(sub("kPa", "", sub("SWAatSWPcrit", "", output_aggregate_daily[doi])))/1000 } else { diff --git a/R/2_SWSF_p3of5_ExternalDataExtractions_v51.R b/R/2_SWSF_p3of5_ExternalDataExtractions_v51.R index f4bdf14e..9912cc4a 100644 --- a/R/2_SWSF_p3of5_ExternalDataExtractions_v51.R +++ 
b/R/2_SWSF_p3of5_ExternalDataExtractions_v51.R @@ -1,54 +1,6 @@ #--------------------------------------------------------------------------------------------------# #------------------------OBTAIN INFORMATION FROM EXTERNAL DATASETS PRIOR TO SIMULATION RUNS TO CREATE THEM -exinfo$GDODCPUCLLNL <- exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA || - exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global || - exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA || - exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global - -#------Load required packages -if (exinfo$ExtractSkyDataFromNOAAClimateAtlas_USA) { - if (!require(rgdal, quietly=TRUE)) { - tryCatch(install.packages("rgdal", repos=url.Rrepos, lib=dir.libraries), warning=function(w) { print(w); print("rgdal failed to install"); stop("Stopping") }) - stopifnot(require(rgdal, quietly=TRUE)) - } -} - -if ( exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global || - exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA || - exinfo$ExtractSoilDataFromCONUSSOILFromSTATSGO_USA || - exinfo$ExtractElevation_NED_USA || - exinfo$ExtractElevation_HWSD_Global) { - - if (!require(raster, quietly=TRUE)) { - tryCatch(install.packages("raster", repos=url.Rrepos, lib=dir.libraries), warning=function(w) { print(w); print("raster failed to install"); stop("Stopping") }) - stopifnot(require(raster, quietly=TRUE)) - } -} - -if (exinfo$GDODCPUCLLNL) { - if (!require(ncdf4, quietly=TRUE)) { - tryCatch(install.packages("ncdf4", repos=url.Rrepos, lib=dir.libraries), warning=function(w) { print(w); print("ncdf4 failed to install"); stop("Stopping") }) - stopifnot(require(ncdf4, quietly=TRUE)) - } - - if (!require("compiler", quietly=TRUE)) { - tryCatch(install.packages("compiler", repos=url.Rrepos, lib=dir.libraries), warning=function(w) { print(w); print("'compiler' failed to install"); stop("Stopping") }) - stopifnot(require("compiler", quietly=TRUE)) 
- } - - useRCurl <- FALSE -} - -if (exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { - useRCurl <- FALSE - if (!require(RCurl, quietly=TRUE)) { - tryCatch(install.packages("RCurl", repos=url.Rrepos, lib=dir.libraries), warning=function(w) { print(w); print("RCurl failed to install, using 'download.files' instead") }) - } - if (require(RCurl, quietly=TRUE)) useRCurl <- TRUE -} - - if (exinfo$use_sim_spatial) { # extraction functions if (sim_cells_or_points == "point") { @@ -584,23 +536,158 @@ if (exinfo$use_sim_spatial) { } #--------------------------------------------------------------------------------------------------# +#------ Climate scenarios +exinfo$which_NEX <- grepl("NEX", opt_climsc_extr) +exinfo$which_netCDF <- grepl("(GDODCPUCLLNL)|(SageSeer)", opt_climsc_extr) +exinfo$which_ClimateWizard <- grepl("ClimateWizardEnsembles", opt_climsc_extr) + + +#--- Meta information of climate datasets +template_bbox <- data.frame(matrix(NA, nrow = 2, ncol = 2, + dimnames = list(NULL, c("lat", "lon")))) +template_tbox <- data.frame(matrix(NA, nrow = 2, ncol = 2, + dimnames = list(c("start", "end"), c("first", "second")))) + +fill_bounding_box <- function(box, vals) { + box[] <- vals + box +} + +# SoilWat required units are c("cm/day", "C", "C") +var_names_fixed <- c("prcp", "tmin", "tmax", "tmean") + +climDB_metas <- list( + CMIP3_ClimateWizardEnsembles_Global = list( + bbox = fill_bounding_box(template_bbox, list(y = c(-55, 84), x = c(-180, 180))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(NA, NA), t2 = c(2070, 2099))), + units = c(prcp = "%", tmin = "C", tmax = "C", tmean = "C")), + + CMIP3_ClimateWizardEnsembles_USA = list( + bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 49.375), x = c(-124.75, -67))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(NA, NA), t2 = c(2070, 2099))), + units = c(prcp = "%", tmin = "C", tmax = "C", tmean = "C")), + + CMIP3_BCSD_GDODCPUCLLNL_Global = list( + bbox = fill_bounding_box(template_bbox, 
list(y = c(-55.25-0.25, 83.25+0.25), x = c(-179.75-0.25, 179.75+0.25))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 1999), t2 = c(2000, 2099))), + var_desc = data.frame(tag = temp <- c("Prcp", "Tmin", "Tmax", "Tavg"), + fileVarTags = paste("monthly", temp, sep = "."), + unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = ".", + str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6)), + + CMIP5_BCSD_GDODCPUCLLNL_Global = list( + bbox = fill_bounding_box(template_bbox, list(y = c(-55.25-0.25, 83.25+0.25), x = c(-179.75-0.25, 179.75+0.25))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), + var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), + fileVarTags = paste0("_", temp, "_"), + unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = "_", + str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8)), + + CMIP3_BCSD_GDODCPUCLLNL_USA = list( + bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 52.875), x = c(-124.625, -67))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 1999), t2 = c(2000, 2099))), + var_desc = data.frame(tag = temp <- c("Prcp", "Tmin", "Tmax", "Tavg"), + fileVarTags = paste("monthly", temp, sep = "."), + unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = ".", + str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6)), + + CMIP5_BCSD_GDODCPUCLLNL_USA = list( + bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 52.875), x = c(-124.625, -67))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), + var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), + fileVarTags = paste0("_", temp, 
"_"), + unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = "_", + str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8)), + + CMIP5_BCSD_NEX_USA = list( + bbox = fill_bounding_box(template_bbox, list(y = c(24.0625, 49.9375), x = c(-125.02083333, -66.47916667))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), + var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), + fileVarTags = paste0("_", temp, "_"), + unit_given = temp <- c("kg/m2/s", "K", "K", "K"), unit_real = temp, + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = NULL, str_fname = NULL), # online access, i.e., no file names to parse + + CMIP5_BCSD_SageSeer_USA = list( + bbox = fill_bounding_box(template_bbox, list(y = c(31.75333, 49.00701), x = c(-124.2542, -102.2534))), + tbox = fill_bounding_box(template_tbox, list(t1 = c(1980, 1999), t2 = c(2070, 2099))), + var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), + fileVarTags = paste0("_", temp, "_"), + unit_given = c("kg m-2 s-1", "K", "K", "K"), + unit_real = c("mm/month", "C", "C", "C"), + row.names = var_names_fixed, stringsAsFactors = FALSE), + sep_fname = "_", + str_fname = c(id_var = 2, id_gcm = 4, id_scen = 5, id_run = 6, id_time = 7)) +) + + +#---Allow for multiple data sources +# - among sites but not multiple sources per site (for that you need a new row in the MasterInput spreadsheet) +# - The data source is used that is the first in 'opt_climsc_extr' and covers a location + +if (exinfo$ExtractClimateChangeScenarios) { + xy <- with(SWRunInformation[runIDs_sites,], data.frame(X_WGS84, Y_WGS84)) + + if (extract_determine_database == "SWRunInformation" && "GCM_sources" %in% colnames(SWRunInformation)) { + sites_GCM_source <- SWRunInformation$GCM_sources[runIDs_sites] + + } else if (extract_determine_database == "order" || !("GCM_sources" %in% 
colnames(SWRunInformation))) { + sites_GCM_source <- rep(NA, times = runsN_sites) + i_use <- rep(FALSE, times = runsN_sites) + + # determine which data product to use for each site based on bounding boxes of datasets + for (ds in opt_climsc_extr) { + i_use <- in_box(xy, climDB_metas[[ds]][["bbox"]]$lon, + climDB_metas[[ds]][["bbox"]]$lat, i_use) + + sites_GCM_source[i_use] <- ds + } + + #write data to datafile.SWRunInformation + SWRunInformation$GCM_sources[runIDs_sites] <- as.character(sites_GCM_source) + write.csv(SWRunInformation, file = file.path(dir.in, datafile.SWRunInformation), row.names = FALSE) + unlink(file.path(dir.in, datafile.SWRWinputs_preprocessed)) + + } else { + + stop(paste("Value of 'extract_determine_database'", extract_determine_database, "not implemented")) + } + + if (anyNA(sites_GCM_source)) + print(paste("No climate change data available for", sum(is.na(sites_GCM_source)), "sites")) +} + + #------Load additional parameters and functions for data from the Lawrence Livermore National Lab and from USGS NEX -if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { +if (exinfo$ExtractClimateChangeScenarios && + (any(exinfo$which_NEX) || any(exinfo$which_netCDF))) { stopifnot(getCurrentWeatherDataFromDatabase, getScenarioWeatherDataFromDatabase) - dbW_setConnection(dbFilePath=dbWeatherDataFile) - dbW_iSiteTable <- dbW_getSiteTable() - dbW_iScenarioTable <- dbW_getScenariosTable() + Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile) + dbW_iSiteTable <- Rsoilwat31::dbW_getSiteTable() + dbW_iScenarioTable <- Rsoilwat31::dbW_getScenariosTable() dbW_compression_type <- Rsoilwat31:::dbW_compression() - climScen <- data.frame(matrix(unlist(strsplit(temp <- climate.conditions[!grepl(climate.ambient, climate.conditions)], split=".", fixed=TRUE)), ncol=4, byrow=TRUE), stringsAsFactors=FALSE) - climScen$imap_todbW <- match(temp, table=dbW_iScenarioTable$Scenario, nomatch=0) + sctemp <- 
climate.conditions[!grepl(climate.ambient, climate.conditions)] + temp <- strsplit(sctemp, split = ".", fixed = TRUE) + if (!all(lengths(temp) == 4L)) + stop("'climate.conditions' are mal-formed: they must contain 4 elements that are concatenated by '.'") + climScen <- data.frame(matrix(unlist(temp), ncol = 4, byrow = TRUE), stringsAsFactors = FALSE) + climScen$imap_todbW <- match(sctemp, table = dbW_iScenarioTable$Scenario, nomatch = 0) dbW_iScenarioTable[, "Scenario"] <- tolower(dbW_iScenarioTable[, "Scenario"]) reqGCMs <- unique(climScen[, 4]) reqRCPs <- unique(climScen[, 3]) - reqRCPsPerGCM <- lapply(reqGCMs, FUN=function(x) unique(climScen[x == climScen[, 4], 3])) - reqDownscalingsPerGCM <- lapply(reqGCMs, FUN=function(x) unique(climScen[x == climScen[, 4], 1])) + reqRCPsPerGCM <- lapply(reqGCMs, function(x) unique(climScen[x == climScen[, 4], 3])) + reqDownscalingsPerGCM <- lapply(reqGCMs, function(x) unique(climScen[x == climScen[, 4], 1])) for (i in seq_along(reqGCMs)) { dir.create2(file.path(dir.out.temp, reqGCMs[i]), showWarnings=FALSE, recursive=TRUE) @@ -609,10 +696,6 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #---Downscaling/bias-correction functions #Helper functions - in_GMC_box <- compiler::cmpfun(function(xy, lats, longs, i_use) { - !i_use & (xy[, 1] >= longs[1] & xy[, 1] <= longs[2]) & (xy[, 2] >= lats[1] & xy[, 2] <= lats[2]) - }) - unique_times <- compiler::cmpfun(function(timeSlices, slice) { starts <- na.exclude(timeSlices$Year[timeSlices$Slice == slice & timeSlices$Time == "start"]) ends <- na.exclude(timeSlices$Year[timeSlices$Slice == slice & timeSlices$Time == "end"]) @@ -846,7 +929,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U applyDeltas <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, delta_ts, ppt_fun, sigmaN = 6, do_checks=FALSE) { dailyPPTceiling <- 1.5 * max(sapply(obs.hist.daily, FUN=function(obs) max(obs@data[,4]))) #Hamlet et al. 
2010: "an arbitrary ceiling of 150% of the observed maximum precipitation value for each cell is also imposed by “spreading out” very large daily precipitation values into one or more adjacent days" - res <- try(lapply(obs.hist.daily, FUN=function(obs) { + res <- lapply(obs.hist.daily, function(obs) { month <- as.POSIXlt(paste(obs@year, obs@data[, "DOY"], sep="-"), format="%Y-%j", tz = "UTC")$mon + 1 ydelta <- delta_ts[delta_ts[, "Year"] == obs@year, -(1:2)] tmax <- obs@data[, "Tmax_C"] + ydelta[month, "Tmax_C"] @@ -856,7 +939,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U tmin <- obs@data[, "Tmin_C"] + ydelta[month, "Tmin_C"] if (do_checks) test_sigmaNormal(data=tmin, sigmaN) - ppt_data <- unlist(lapply(1:12, FUN=function(m) { + ppt_data <- unlist(lapply(1:12, function(m) { im_month <- month == m m_ydelta <- ydelta[m, 3] m_data <- obs@data[im_month, "PPT_cm"] @@ -895,12 +978,16 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U return(res) })) - ppt <- controlExtremePPTevents(data=ppt_data, dailyPPTceiling, do_checks=do_checks, sigmaN = sigmaN) + ppt <- controlExtremePPTevents(data = ppt_data, dailyPPTceiling, + do_checks = do_checks, sigmaN = sigmaN) - new("swWeatherData", data=round(data.matrix(cbind(obs@data[, "DOY"], tmax, tmin, ppt), rownames.force=FALSE), 2), year=obs@year) - }), silent=TRUE) + new("swWeatherData", data = + round(data.matrix(cbind(obs@data[, "DOY"], tmax, tmin, ppt), + rownames.force = FALSE), 2), + year = obs@year) + }) - return(res) + res }) @@ -1034,29 +1121,31 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U }) - applyDelta_oneYear <- compiler::cmpfun(function(obs, delta_ts, ppt_fun, daily, monthly, ppt_type = c("simple", "detailed"), dailyPPTceiling, sigmaN, do_checks) { - ppt_type <- match.arg(ppt_type) + applyDelta_oneYear <- compiler::cmpfun(function(obs, delta_ts, ppt_fun, daily, monthly, + ppt_type = NULL, dailyPPTceiling, 
sigmaN, do_checks) { + + ppt_type <- match.arg(ppt_type, c(NA, "detailed", "simple")) month <- 1 + as.POSIXlt(seq(ISOdate(obs@year, 1, 1, tz = "UTC"), - ISOdate(obs@year, 12, 31, tz = "UTC"), by = "day"))$mon + ISOdate(obs@year, 12, 31, tz = "UTC"), by = "day"))$mon ydeltas <- delta_ts[delta_ts[, "Year"] == obs@year, -(1:2)] add_days <- ppt_fun[month] == "+" mult_days <- !add_days PPT_to_remove <- 0 tmax <- obs@data[, "Tmax_C"] + ydeltas[month, "Tmax_C"] - if (do_checks) test_sigmaNormal(data=tmax, sigmaN) + if (do_checks) test_sigmaNormal(data = tmax, sigmaN) tmin <- obs@data[, "Tmin_C"] + ydeltas[month, "Tmin_C"] - if (do_checks) test_sigmaNormal(data=tmin, sigmaN) + if (do_checks) test_sigmaNormal(data = tmin, sigmaN) - if (ppt_type == "simple") { + if (isTRUE(ppt_type == "simple")) { ppt <- applyPPTdelta_simple(m = month, data = obs@data[, "PPT_cm"], ydelta = ydeltas[month, "PPT_cm"], add_days = add_days, mult_days = mult_days) - } else if (ppt_type == "detailed") { + } else if (isTRUE(ppt_type == "detailed")) { temp <- applyPPTdelta_detailed(m = month, data = obs@data[, "PPT_cm"], ydelta = ydeltas[month, "PPT_cm"], @@ -1070,6 +1159,8 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U do_checks = do_checks, dailyPPTceiling = dailyPPTceiling, sigmaN = sigmaN) + } else { + stop(paste("'applyDelta_oneYear': argument not recognized: ppt_type =", ppt_type)) } @@ -1082,8 +1173,8 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U }) - applyDeltas2 <- compiler::cmpfun(function(daily, monthly, years, delta_ts, ppt_fun, ppt_type = c("simple", "detailed"), dailyPPTceiling, sigmaN, do_checks = FALSE) { - ppt_type <- match.arg(ppt_type) + applyDeltas2 <- compiler::cmpfun(function(daily, monthly, years, delta_ts, ppt_fun, + ppt_type = NULL, dailyPPTceiling, sigmaN, do_checks = FALSE) { sw_list <- list() totalPPT_to_remove <- 0 @@ -1091,7 +1182,9 @@ if (exinfo$GDODCPUCLLNL || 
exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U for (i in seq_along(daily)) { temp <- applyDelta_oneYear(obs = daily[[i]], delta_ts = delta_ts, ppt_fun = ppt_fun, daily = daily, monthly = monthly, - ppt_type = ppt_type, dailyPPTceiling = dailyPPTceiling, sigmaN = sigmaN, do_checks = do_checks) + ppt_type = ppt_type, dailyPPTceiling = dailyPPTceiling, sigmaN = sigmaN, + do_checks = do_checks) + sw_list[[i]] <- temp[["sw"]] totalPPT_to_remove <- totalPPT_to_remove + temp[["PPT_to_remove"]] } @@ -1106,7 +1199,8 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U if (totalPPT > abs(totalPPT_to_remove)) { daily2[, "PPT_cm"] <- daily2[, "PPT_cm"] * (1 - abs(totalPPT_to_remove) / totalPPT) } else { - warning(paste("Total site precipitation should be reduced on average by a further", round((abs(totalPPT_to_remove) - totalPPT) / length(daily), 2), "cm / year")) + print(paste("Total site precipitation should be reduced on average by a further", + round((abs(totalPPT_to_remove) - totalPPT) / length(daily), 2), "cm / year")) daily2[, "PPT_cm"] <- 0 } @@ -1129,15 +1223,19 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @param monthly A numeric matrix. Monthly time-series of observed weather calculated from \code{daily} for the years simstartyr:endyr. #' @param scen.hist.monthly A numeric matrix. Monthly time-series of scenario weather during the historic time period DScur_startyr:DScur_endyr #' @param scen.fut.monthly A numeric matrix. Monthly time-series of scenario weather during the projected time period DSfut_startyr:DSfut_endyr - #' @param downscaling.options A named list. + #' @param opt_DS A named list. #' @param do_checks A logical value. If \code{TRUE} perform several sanity checks on the data. 
- downscale <- function(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, downscaling.options, do_checks = TRUE) {} + downscale <- function(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, opt_DS, do_checks = TRUE) {} #' Time periods for downscaling functions #' @inheritParams downscale - downscale.periods <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, scen.hist.monthly = NULL, scen.fut.monthly = NULL, years = NULL, DScur_startyear = NULL, DScur_endyear = NULL, DSfut_startyear = NULL, DSfut_endyear = NULL) { + downscale.periods <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, + scen.hist.monthly = NULL, scen.fut.monthly = NULL, years = NULL, + DScur_startyear = NULL, DScur_endyear = NULL, + DSfut_startyear = NULL, DSfut_endyear = NULL) { + # Time periods # - historic observed period: simstartyr:endyr dyears <- sapply(obs.hist.daily, function(obs) obs@year) @@ -1154,18 +1252,24 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U if (is.null(DScur_endyear)) DScur_endyear <- scen.hist.monthly[nrow(scen.hist.monthly), 1] iuse_scen_hist_m <- scen.hist.monthly[, 1] >= DScur_startyear & scen.hist.monthly[, 1] <= DScur_endyear if (!(sum(iuse_scen_hist_m) == (DScur_endyear - DScur_startyear + 1) * 12)) { - warning("downscale.periods: resulting record of 'scen.hist.monthly' covers only the years ", paste(range(scen.hist.monthly[iuse_scen_hist_m, 1]), collapse = "-"), " instead of the requested ", DScur_startyear, "-", DScur_endyear, immediate. 
= TRUE) + print(paste0("downscale.periods: resulting record of 'scen.hist.monthly' covers only the years ", + paste(range(scen.hist.monthly[iuse_scen_hist_m, 1]), collapse = "-"), + " instead of the requested ", DScur_startyear, "-", DScur_endyear)) } + } else { DScur_startyear <- DScur_endyear <- iuse_scen_hist_m <- NULL } + # - future training period: DSfut_startyear:DSfut_endyear if (!is.null(scen.fut.monthly)) { if (is.null(DSfut_startyear)) DSfut_startyear <- scen.fut.monthly[1, 1] if (is.null(DSfut_endyear)) DSfut_endyear <- scen.fut.monthly[nrow(scen.fut.monthly), 1] iuse_scen_fut_m <- scen.fut.monthly[, 1] >= DSfut_startyear & scen.fut.monthly[, 1] <= DSfut_endyear if (!(sum(iuse_scen_fut_m) == (DSfut_endyear - DSfut_startyear + 1) * 12)) { - warning("downscale.periods: resulting record of 'scen.fut.monthly' covers only the years ", paste(range(scen.fut.monthly[iuse_scen_fut_m, 1]), collapse = "-"), " instead of the requested ", DSfut_startyear, "-", DSfut_endyear, immediate. = TRUE) + print(paste0("downscale.periods: resulting record of 'scen.fut.monthly' covers only the years ", + paste(range(scen.fut.monthly[iuse_scen_fut_m, 1]), collapse = "-"), + " instead of the requested ", DSfut_startyear, "-", DSfut_endyear)) } } else { DSfut_startyear <- DSfut_endyear <- iuse_scen_fut_m <- NULL @@ -1188,12 +1292,24 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' #' @references Lenderink, G., A. Buishand, and W. van Deursen. 2007. Estimates of future discharges of the river Rhine using two scenario methodologies: direct versus delta approach. Hydrology and Earth System Sciences 11:1145-1159. 
#' @export - downscale.raw <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, years = NULL, DScur_startyear = NULL, DScur_endyear = NULL, DSfut_startyear = NULL, DSfut_endyear = NULL, downscaling.options = list(applyPPT_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), dailyPPTceiling, do_checks=TRUE) { + downscale.raw <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, + scen.fut.monthly, years = NULL, + DScur_startyear = NULL, DScur_endyear = NULL, + DSfut_startyear = NULL, DSfut_endyear = NULL, + opt_DS = list(ppt_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), + dailyPPTceiling, do_checks = TRUE, ...) { + # Time periods - tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly = NULL, scen.fut.monthly, years, DScur_startyear, DScur_endyear, DSfut_startyear, DSfut_endyear) - if (any(!tp$iuse_obs_hist_d)) obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] - if (any(!tp$iuse_obs_hist_m)) obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] - if (any(!tp$iuse_scen_fut_m)) scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] + tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly = NULL, + scen.fut.monthly, years, DScur_startyear, DScur_endyear, + DSfut_startyear, DSfut_endyear) + + if (any(!tp$iuse_obs_hist_d)) + obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] + if (any(!tp$iuse_obs_hist_m)) + obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] + if (any(!tp$iuse_scen_fut_m)) + scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] # 1. 
Calculate mean monthly values in historic and future scenario values scen.fut.mean_tmax <- tapply(scen.fut.monthly[, "tmax"], INDEX = scen.fut.monthly[, "month"], mean, na.rm = TRUE) @@ -1215,18 +1331,21 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U delta_ts[, "Tmax_C"] <- scen.fut.mean_tmax - obs.hist.mean_tmax delta_ts[, "Tmin_C"] <- scen.fut.mean_tmin - obs.hist.mean_tmin delta_ppts <- scen.fut.mean_ppt / obs.hist.mean_ppt - temp_add <- delta_ppts < 1 / (10 * downscaling.options[["PPTratioCutoff"]]) | delta_ppts > downscaling.options[["PPTratioCutoff"]] + temp_add <- obs.hist.mean_ppt < tol | + delta_ppts < 1 / (10 * opt_DS[["PPTratioCutoff"]]) | + delta_ppts > opt_DS[["PPTratioCutoff"]] if (any(temp_add)) { ppt_fun[temp_add] <- "+" - delta_ppts[temp_add] <- scen.fut.mean_ppt - obs.hist.mean_ppt + delta_ppts[temp_add] <- scen.fut.mean_ppt[temp_add] - obs.hist.mean_ppt[temp_add] } delta_ts[, "PPT_cm"] <- delta_ppts # 3. Apply deltas to historic daily weather - try(applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, + applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, years = tp$years, delta_ts = delta_ts, ppt_fun = ppt_fun, - ppt_type = downscaling.options[["applyPPT_type"]], dailyPPTceiling = dailyPPTceiling, sigmaN = downscaling.options[["sigmaN"]], do_checks = do_checks), silent = TRUE) + ppt_type = opt_DS[["ppt_type"]], dailyPPTceiling = dailyPPTceiling, + sigmaN = opt_DS[["sigmaN"]], do_checks = do_checks) }) #' Downscale with the 'delta approach' @@ -1236,13 +1355,24 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @references Hay, L. E., R. L. Wilby, and G. H. Leavesley. 2000. A comparison of delta change and downscaled gcm scenarios for three mountainous basins in the United States. Journal of the American Water Resources Association 36:387-397. #' @references Hamlet, A. F., E. P. Salathé, and P. Carrasco. 2010. 
Statistical downscaling techniques for global climate model simulations of temperature and precipitation with application to water resources planning studies. Chapter 4. Final Report for the Columbia Basin Climate Change Scenarios Project. Climate Impacts Group, Center for Science in the Earth System, Joint Institute for the Study of the Atmosphere and Ocean, University of Washington, Seattle, WA. #' @export - downscale.delta <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, years = NULL, DScur_startyear = NULL, DScur_endyear = NULL, DSfut_startyear = NULL, DSfut_endyear = NULL, downscaling.options = list(applyPPT_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), dailyPPTceiling, do_checks = TRUE) { + downscale.delta <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, + scen.hist.monthly, scen.fut.monthly, years = NULL, + DScur_startyear = NULL, DScur_endyear = NULL, + DSfut_startyear = NULL, DSfut_endyear = NULL, + opt_DS = list(ppt_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), + dailyPPTceiling, do_checks = TRUE, ...) 
{ # Time periods - tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, years, DScur_startyear, DScur_endyear, DSfut_startyear, DSfut_endyear) - if (any(!tp$iuse_obs_hist_d)) obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] - if (any(!tp$iuse_obs_hist_m)) obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] - if (any(!tp$iuse_scen_hist_m)) scen.hist.monthly <- scen.hist.monthly[tp$iuse_scen_hist_m, ] - if (any(!tp$iuse_scen_fut_m)) scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] + tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, + scen.fut.monthly, years, DScur_startyear, DScur_endyear, DSfut_startyear, DSfut_endyear) + + if (any(!tp$iuse_obs_hist_d)) + obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] + if (any(!tp$iuse_obs_hist_m)) + obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] + if (any(!tp$iuse_scen_hist_m)) + scen.hist.monthly <- scen.hist.monthly[tp$iuse_scen_hist_m, ] + if (any(!tp$iuse_scen_fut_m)) + scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] # 1. 
Calculate mean monthly values in historic and future scenario values scen.fut.mean_tmax <- tapply(scen.fut.monthly[, "tmax"], INDEX = scen.fut.monthly[, "month"], mean, na.rm = TRUE) @@ -1265,19 +1395,22 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U delta_ts[, "Tmax_C"] <- scen.fut.mean_tmax - scen.hist.mean_tmax delta_ts[, "Tmin_C"] <- scen.fut.mean_tmin - scen.hist.mean_tmin delta_ppts <- scen.fut.mean_ppt / scen.hist.mean_ppt - temp_add <- delta_ppts < 1 / (10 * downscaling.options[["PPTratioCutoff"]]) | delta_ppts > downscaling.options[["PPTratioCutoff"]] + temp_add <- scen.hist.mean_ppt < tol | + delta_ppts < 1 / (10 * opt_DS[["PPTratioCutoff"]]) | + delta_ppts > opt_DS[["PPTratioCutoff"]] if (any(temp_add)) { ppt_fun[temp_add] <- "+" - delta_ppts[temp_add] <- scen.fut.mean_ppt - scen.hist.mean_ppt + delta_ppts[temp_add] <- scen.fut.mean_ppt[temp_add] - scen.hist.mean_ppt[temp_add] } delta_ts[, "PPT_cm"] <- delta_ppts # 3. Apply deltas to historic daily weather - try(applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, + applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, years = tp$years, delta_ts = delta_ts, ppt_fun = ppt_fun, - ppt_type = downscaling.options[["applyPPT_type"]], dailyPPTceiling = dailyPPTceiling, sigmaN = downscaling.options[["sigmaN"]], do_checks = do_checks), silent = TRUE) + ppt_type = opt_DS[["ppt_type"]], dailyPPTceiling = dailyPPTceiling, + sigmaN = opt_DS[["sigmaN"]], do_checks = do_checks) }) #' Downscale with the 'delta-hybrid approach' old version (prior to May 2016) @@ -1293,7 +1426,12 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @references Dickerson-Lange, S. E., and R. Mitchell. 2014. Modeling the effects of climate change projections on streamflow in the Nooksack River basin, Northwest Washington. Hydrological Processes:doi: 10.1002/hyp.10012. #' @references Wang, L., and W. Chen. 2014. 
Equiratio cumulative distribution function matching as an improvement to the equidistant approach in bias correction of precipitation. Atmospheric Science Letters 15:1-6. #' @export - downscale.deltahybrid <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, years = NULL, DScur_startyear = NULL, DScur_endyear = NULL, DSfut_startyear = NULL, DSfut_endyear = NULL, downscaling.options = list(sigmaN = 6, PPTratioCutoff = 10), do_checks=TRUE) { + downscale.deltahybrid <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, + scen.hist.monthly, scen.fut.monthly, years = NULL, + DScur_startyear = NULL, DScur_endyear = NULL, + DSfut_startyear = NULL, DSfut_endyear = NULL, + opt_DS = list(sigmaN = 6, PPTratioCutoff = 10), + do_checks = TRUE, ...) { #Functions eCDF.Cunnane <- function(x) { na_N <- sum(is.na(x)) @@ -1340,18 +1478,28 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # 2. Adjust future scenario with quantile-based deltas from historic comparison for future scenario values with linear extrapolation # - Additive approach (Anandhi et al. 2011): Temp, close-to-zero PPT, small or very large PPT ratios # - Multiplicative approach (Wang et al. 
2014): PPT otherwise - scHistToFut <- scen.hist.ecdf$fun(scen.fut.ecdf$q, extrapol="linear") - scHistToFutRatio <- obs.hist.ecdf$fun(scen.fut.ecdf$q, extrapol="linear") / scHistToFut - if (any(iv <= 2, any(scHistToFut < 1/(10*downscaling.options[["PPTratioCutoff"]])), any(scHistToFutRatio > downscaling.options[["PPTratioCutoff"]]), any(scHistToFutRatio < 1/downscaling.options[["PPTratioCutoff"]]))) { + scHistToFut <- scen.hist.ecdf$fun(scen.fut.ecdf$q, extrapol = "linear") + scHistToFutRatio <- obs.hist.ecdf$fun(scen.fut.ecdf$q, extrapol = "linear") / scHistToFut + + if (any(iv <= 2, + scHistToFut < 1 / (10 * opt_DS[["PPTratioCutoff"]]), + scHistToFutRatio > opt_DS[["PPTratioCutoff"]], + scHistToFutRatio < 1 / opt_DS[["PPTratioCutoff"]])) { scen.fut.xadj <- scen.fut.x + obs.hist.ecdf$fun(scen.fut.ecdf$q, extrapol="linear") - scHistToFut - if (all(iv == 3, sum(temp0 <- (scen.fut.xadj < 0)) > 0)) scen.fut.xadj[temp0] <- 0 + + if (all(iv == 3, sum(temp0 <- (scen.fut.xadj < 0)) > 0)) + scen.fut.xadj[temp0] <- 0 + } else { scen.fut.xadj <- scen.fut.x * scHistToFutRatio } + stopifnot(is.finite(scen.fut.xadj)) if (do_checks) { - if (iv <= 2) test_sigmaNormal(data=scen.fut.xadj, downscaling.options[["sigmaN"]]) - if (iv == 3) test_sigmaGamma(data=scen.fut.xadj, downscaling.options[["sigmaN"]]) + if (iv <= 2) + test_sigmaNormal(data=scen.fut.xadj, opt_DS[["sigmaN"]]) + if (iv == 3) + test_sigmaGamma(data=scen.fut.xadj, opt_DS[["sigmaN"]]) } # 3. Calculate eCDF of future adjusted scenario @@ -1362,9 +1510,15 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # - Multiplicative approach (Wang et al. 
2014): PPT otherwise scHistToHist <- obs.hist.ecdf$fun(obs.hist.ecdf$q, extrapol="linear") scHistToFutRatio <- scen.fut2.ecdf$fun(obs.hist.ecdf$q, extrapol="linear") / scHistToHist - if (any(iv <= 2, any(scHistToHist < 1/(10*downscaling.options[["PPTratioCutoff"]])), any(scHistToFutRatio > downscaling.options[["PPTratioCutoff"]]), any(scHistToFutRatio < 1/downscaling.options[["PPTratioCutoff"]]))) { + + if (any(iv <= 2, + scHistToHist < 1 / (10 * opt_DS[["PPTratioCutoff"]]), + scHistToFutRatio > opt_DS[["PPTratioCutoff"]], + scHistToFutRatio < 1 / opt_DS[["PPTratioCutoff"]])) { mapFut <- scen.fut2.ecdf$fun(obs.hist.ecdf$q, extrapol="linear") - scHistToHist - if (iv == 3) ppt_fun[m] <- "+" + if (iv == 3) + ppt_fun[m] <- "+" + } else { mapFut <- scHistToFutRatio stopifnot(all(!is.infinite(mapFut)), all(!is.nan(mapFut))) #if (sum(temp <- is.nan(mapFut)) > 0) mapFut[temp] <- 0 @@ -1374,17 +1528,21 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } # 6. Apply deltas to historic daily weather - applyDeltas(obs.hist.daily, obs.hist.monthly, delta_ts, ppt_fun, downscaling.options[["sigmaN"]], do_checks=do_checks) + applyDeltas(obs.hist.daily, obs.hist.monthly, delta_ts, ppt_fun, opt_DS[["sigmaN"]], do_checks=do_checks) }) #------------------------ - doQmapQUANT.default_drs <- compiler::cmpfun(function(x, fobj, type = c("linear", "tricub"), linear_extrapolation=c("Boe", "Thermessl2012CC.QMv1b", "none"), spline_method=c("monoH.FC", "fmm", "natural"), monthly_extremes=NULL, correctSplineFun_type=c("fail", "none", "attempt"), ...) { + doQmapQUANT.default_drs <- compiler::cmpfun(function(x, fobj, type = NULL, + lin_extrapol = NULL, spline_method = NULL, + monthly_extremes = NULL, fix_spline = NULL, ...) 
{ + # Note: differs from call to call if jitter correction is used - type <- match.arg(type) - linear_extrapolation <- match.arg(linear_extrapolation) - spline_method <- match.arg(spline_method) - correctSplineFun_type <- match.arg(correctSplineFun_type) + type <- match.arg(type, c(NA, "linear", "tricub")) + lin_extrapol <- match.arg(lin_extrapol, + c(NA, "Boe", "Thermessl2012CC.QMv1b", "none")) + spline_method <- match.arg(spline_method, c(NA, "monoH.FC", "fmm", "natural")) + fix_spline <- match.arg(fix_spline, c(NA, "fail", "none", "attempt")) wet <- if (!is.null(fobj$wet.day)) { x >= fobj$wet.day @@ -1393,11 +1551,13 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } out <- rep(NA, length.out = length(x)) - if (type == "linear") { - out[wet] <- approx(x = fobj$par$modq[, 1], y = fobj$par$fitq[, 1], xout = x[wet], method = "linear", rule = 2, ties = mean)$y - if (!(linear_extrapolation == "none")) { + if (isTRUE(type == "linear")) { + out[wet] <- approx(x = fobj$par$modq[, 1], y = fobj$par$fitq[, 1], xout = x[wet], + method = "linear", rule = 2, ties = mean)$y + + if (!isTRUE(lin_extrapol == "none")) { # "same extrapolation as Boe et al. (2007), but neglecting the three highest/lowest correction terms" Thermessl et al. 
2011 Climatic Change - qid <- switch(linear_extrapolation, Boe=0, Thermessl2012CC.QMv1b=3) + qid <- switch(lin_extrapol, Boe = 0, Thermessl2012CC.QMv1b = 3) nq <- nrow(fobj$par$modq) largex <- x > fobj$par$modq[nq, 1] + tol if (any(largex)) { @@ -1410,7 +1570,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U out[smallx] <- x[smallx] - min.delta } } - } else if (type == "tricub") { + } else if (isTRUE(type == "tricub")) { sfun <- splinefun(x = fobj$par$modq[, 1], y = fobj$par$fitq[, 1], method = spline_method) #only "monoH.FC" would be appropriate here because we would want a monotone function if possible temp <- sfun(x[wet]) @@ -1420,44 +1580,58 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # 2a) arising from non-monotone splines ('fmm' and 'natural') # 2b) arising from numerical instabilities in the exact monotonicity for 'monoH.FC' - if (!is.null(monthly_extremes) && !(correctSplineFun_type == "none")) { + if (!is.null(monthly_extremes) && !isTRUE(fix_spline == "none")) { # version previous to 20150705 didn't catch several bad cases, e.g., ix = 180099 # to prevent huge oscillation in 'fmm' and 'natural', we need to bound values between some small and some not-too large number # apparently 'monoH.FC' does also show huge oscillations, e.g., ix=82529 because of numerical instabilities in the exact monotonicity in fobj$par$modq[, 1] - icount <- 1 - while ((itemp <- sum((temp < monthly_extremes[1]) | (temp > monthly_extremes[2]))) > 0 && icount < 10) { - if (correctSplineFun_type == "fail") stop("Out-of-range splinefun values and 'correctSplineFun_type' set to fail") - sfun <- splinefun(x=jitter(fobj$par$modq[, 1]), y=jitter(fobj$par$fitq[, 1]), method=spline_method) - temp <- sfun(x[wet]) - icount <- icount + 1 - } - if (itemp > 0) stop("Jitter failed to fix out-of-range splinefun values") + icount <- 1 + while ((itemp <- sum((temp < monthly_extremes[1]) | (temp > monthly_extremes[2]))) > 0 && 
icount < 10) { + if (fix_spline == "fail") stop("Out-of-range splinefun values and 'fix_spline' set to fail") + sfun <- splinefun(x=jitter(fobj$par$modq[, 1]), y=jitter(fobj$par$fitq[, 1]), method=spline_method) + temp <- sfun(x[wet]) + icount <- icount + 1 + } + if (itemp > 0) + stop("'doQmapQUANT.default_drs': jitter failed to fix out-of-range splinefun values") } out[wet] <- temp + + } else { + stop(paste("'doQmapQUANT.default_drs': unkown type", shQuote(type))) } + out[!wet] <- 0 - if (!is.null(fobj$wet.day)) out[out < 0] <- 0 + if (!is.null(fobj$wet.day)) + out[out < 0] <- 0 out }) - doQmapQUANT_drs <- compiler::cmpfun(function(x, fobj, type=c("linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies"), montly_obs_base=NULL, monthly_extremes=NULL, correctSplineFun_type=c("fail", "none", "attempt"), ...) { - correctSplineFun_type <- match.arg(correctSplineFun_type) - type <- match.arg(type) - temp <- strsplit(type, "_", fixed=TRUE)[[1]] + doQmapQUANT_drs <- compiler::cmpfun(function(x, fobj, type = NULL, montly_obs_base = NULL, + monthly_extremes = NULL, fix_spline = NULL, ...) 
{ + + fix_spline <- match.arg(fix_spline, c(NA, "fail", "none", "attempt")) + type <- match.arg(type, c("NA_NA", "linear_Boe", "linear_Thermessl2012CC.QMv1b", + "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies")) + temp <- strsplit(type, "_", fixed = TRUE)[[1]] type <- temp[1] type_mod <- temp[2] - if (type == "linear") { - out <- try(doQmapQUANT.default_drs(x, fobj, type="linear", linear_extrapolation=type_mod, monthly_extremes=monthly_extremes, correctSplineFun_type=correctSplineFun_type, ...), silent=TRUE) - } else if (type == "tricub") { - out <- try(doQmapQUANT.default_drs(x, fobj, type="tricub", spline_method=type_mod, monthly_extremes=monthly_extremes, correctSplineFun_type=correctSplineFun_type, ...), silent=TRUE) - } else if (type == "normal") { + if (isTRUE(type == "linear")) { + out <- doQmapQUANT.default_drs(x, fobj, type = "linear", lin_extrapol = type_mod, + monthly_extremes = monthly_extremes, fix_spline = fix_spline, ...) + + } else if (isTRUE(type == "tricub")) { + out <- doQmapQUANT.default_drs(x, fobj, type = "tricub", spline_method = type_mod, + monthly_extremes = monthly_extremes, fix_spline = fix_spline, ...) + + } else if (isTRUE(type == "normal")) { # Tohver, I. M., A. F. Hamlet, and S.-Y. Lee. 2014. Impacts of 21st-Century Climate Change on Hydrologic Extremes in the Pacific Northwest Region of North America. Journal of the American Water Resources Association 50:1461-1476. # Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - out <- doQmapQUANT.default_drs(x, fobj, type="linear", linear_extrapolation="Boe", monthly_extremes=monthly_extremes, correctSplineFun_type=correctSplineFun_type, ...) + out <- doQmapQUANT.default_drs(x, fobj, type = "linear", lin_extrapol = "Boe", + monthly_extremes = monthly_extremes, fix_spline = fix_spline, ...) target_range <- c(-Inf, fobj$par$modq[1, 1] - tol, max(fobj$par$modq[, 1]) + tol, Inf) # -Inf, smallest observed value, largest observed value, Inf out_of_range <- !(findInterval(x, target_range) == 2) @@ -1466,7 +1640,10 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U tscore_x <- (x[out_of_range] - mean(montly_obs_base)) / sd(montly_obs_base) out[out_of_range] <- mean(out[!out_of_range]) + sd(out[!out_of_range]) * tscore_x } - } + + } else { + stop(paste("'doQmapQUANT.drs': unkown type", shQuote(type))) + } out }) @@ -1489,117 +1666,181 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @references Wang, L., and W. Chen. 2014. Equiratio cumulative distribution function matching as an improvement to the equidistant approach in bias correction of precipitation. Atmospheric Science Letters 15:1-6. #' @references Gudmundsson, L., Bremnes, J.B., Haugen, J.E. & Engen-Skaugen, T. (2012). Technical Note: Downscaling RCM precipitation to the station scale using statistical transformations - a comparison of methods. Hydrol Earth Syst Sci, 16, 3383-3390. 
#' @export - downscale.deltahybrid3mod <- compiler::cmpfun(function(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - deltaFuture_yr, years = NULL, - DScur_startyear = NULL, DScur_endyear = NULL, - DSfut_startyear = NULL, DSfut_endyear = NULL, - downscaling.options = list(extrapol_type = "linear_Thermessl2012CC.QMv1b", - applyPPT_type = "detailed", - sigmaN = 6, - PPTratioCutoff = 10, - correctSplineFun_type = "attempt"), - dailyPPTceiling, monthly_extremes, - do_checks = TRUE) { - - stopifnot(requireNamespace("qmap")) - qstep <- 0.01 - nboot <- 1 + downscale.deltahybrid3mod <- compiler::cmpfun(function( + obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, + deltaFuture_yr, years = NULL, + DScur_startyear = NULL, DScur_endyear = NULL, + DSfut_startyear = NULL, DSfut_endyear = NULL, + opt_DS = list( + extrapol_type = "linear_Thermessl2012CC.QMv1b", + ppt_type = "detailed", + sigmaN = 6, + PPTratioCutoff = 10, + fix_spline = "attempt"), + dailyPPTceiling, monthly_extremes, + do_checks = TRUE, ...) 
{ + + stopifnot(requireNamespace("qmap")) + qstep <- 0.01 + nboot <- 1 + + # Time periods + tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, + years, DScur_startyear, DScur_endyear, DSfut_startyear, DSfut_endyear) + + if (any(!tp$iuse_obs_hist_d)) + obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] + if (any(!tp$iuse_obs_hist_m)) + obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] + if (any(!tp$iuse_scen_hist_m)) + scen.hist.monthly <- scen.hist.monthly[tp$iuse_scen_hist_m, ] + if (any(!tp$iuse_scen_fut_m)) + scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] + + # Data objects + sbc.hist.monthly <- matrix(NA, nrow = nrow(scen.hist.monthly), ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly))) + sbc.hist.monthly[, 1:2] <- scen.hist.monthly[, 1:2] + + sbc.fut.monthly <- matrix(NA, nrow = nrow(scen.fut.monthly), ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly))) + sbc.fut.monthly[, 1:2] <- scen.fut.monthly[, 1:2] + + # future simulation years = delta + simstartyr:endyr + hd.fut.monthly <- delta_ts <- matrix(NA, nrow = nrow(obs.hist.monthly), ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly))) + hd.fut.monthly[, 1:2] <- delta_ts[, 1:2] <- obs.hist.monthly[, 1:2] + hd.fut.monthly[, 1] <- hd.fut.monthly[, 1] + deltaFuture_yr + + + #------STEPS 1-4 based on the appendix of Tohver et al. 
2014 + for (iv in 1:3) { # for each variable separately: Tmax, Tmin, PPT + # NAs in scenario data: impute with median conditions + # TODO(drs): implement a more sophisticated imputation scheme; this one biases variation downwards + if (anyNA(scen.hist.monthly[, 2 + iv])) { + id_nas <- is.na(scen.hist.monthly[, 2 + iv]) + scen.hist.monthly[id_nas, 2 + iv] <- median(scen.hist.monthly[, 2 + iv], na.rm = TRUE) + } + + if (anyNA(scen.fut.monthly[, 2 + iv])) { + id_nas <- is.na(scen.fut.monthly[, 2 + iv]) + scen.fut.monthly[id_nas, 2 + iv] <- median(scen.fut.monthly[, 2 + iv], na.rm = TRUE) + } + + #---STEP 1: Statistical bias correction of GCM data + # 1st part of this step is NOT carried out here because our GCM data is already BCSD downscaled: "first aggregating the gridded T and P observations to the GCM grid scale (at the time of this writing typically about 200km resolution)" + + # fit quantile map based on training data of same historic time period + qm_fit <- qmap::fitQmapQUANT.default(obs = obs.hist.monthly[, 2 + iv], + mod = scen.hist.monthly[, 2 + iv], qstep = qstep, nboot = nboot, wet.day = FALSE) + + # 2nd part: bias correcting historic data ("then using quantile mapping techniques to remove the systematic bias in the GCM simulations relative to the observed probability distributions") + sbc.hist.monthly[, 2 + iv] <- doQmapQUANT_drs(x = scen.hist.monthly[, 2 + iv], + fobj = qm_fit,type = opt_DS[["extrapol_type"]], + montly_obs_base = obs.hist.monthly[, 2 + iv], + monthly_extremes = monthly_extremes[[iv]], + fix_spline = opt_DS[["fix_spline"]]) + + # 3rd part: bias correcting future data ("the same quantile map between simulations and observations is used to transform the future simulations from the GCM") + sbc.fut.monthly[, 2 + iv] <- doQmapQUANT_drs(x = scen.fut.monthly[, 2 + iv], fobj = qm_fit, + type = opt_DS[["extrapol_type"]], + montly_obs_base = obs.hist.monthly[, 2 + iv], + monthly_extremes = monthly_extremes[[iv]], + fix_spline = 
opt_DS[["fix_spline"]]) + + + #---STEP 2: Spatial downscaling + # - "the monthly T and P values at the GCM grid scale are interpolated to the fine scale grid" + # -> not done here because spatial aggregation (step 1, 1st part) not carried out + + for (im in 1:12) { # for each month separately + #---STEP 3: Remapping the Historical Record to Interpolated GCM data + id_sim_months <- obs.hist.monthly[, "Month"] == im #identical(obs.hist.monthly[, 2], hd.fut.monthly[, 2]) + + qm_fitm <- qmap::fitQmapQUANT.default(obs = sbc.fut.monthly[sbc.fut.monthly[, 2] == im, 2 + iv], + mod = obs.hist.monthly[id_sim_months, 2 + iv], qstep = qstep, nboot = nboot, + wet.day = FALSE) + + hd.fut.monthly[id_sim_months, 2 + iv] <- doQmapQUANT_drs( + x = obs.hist.monthly[id_sim_months, 2 + iv], + fobj = qm_fitm, type = opt_DS[["extrapol_type"]], + montly_obs_base = obs.hist.monthly[, 2 + iv], + monthly_extremes = monthly_extremes[[iv]], + fix_spline = opt_DS[["fix_spline"]]) + } + } - # Time periods - tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, years, DScur_startyear, DScur_endyear, DSfut_startyear, DSfut_endyear) - if (any(!tp$iuse_obs_hist_d)) obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] - if (any(!tp$iuse_obs_hist_m)) obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] - if (any(!tp$iuse_scen_hist_m)) scen.hist.monthly <- scen.hist.monthly[tp$iuse_scen_hist_m, ] - if (any(!tp$iuse_scen_fut_m)) scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] + #---STEP 4: Daily Time Step Disaggregation of Monthly Data + delta_ts[, c("Tmax_C", "Tmin_C")] <- hd.fut.monthly[, c("Tmax_C", "Tmin_C")] - + obs.hist.monthly[, c("Tmax_C", "Tmin_C")] # equation 8 - # Data objects - sbc.hist.monthly <- matrix(NA, ncol=5, nrow=nrow(scen.hist.monthly), dimnames=list(NULL, colnames(obs.hist.monthly))) - sbc.hist.monthly[, 1:2] <- scen.hist.monthly[, 1:2] - sbc.fut.monthly <- matrix(NA, ncol=5, nrow=nrow(scen.fut.monthly), dimnames=list(NULL, 
colnames(obs.hist.monthly))) - sbc.fut.monthly[, 1:2] <- scen.fut.monthly[, 1:2] - # future simulation years = delta + simstartyr:endyr - hd.fut.monthly <- delta_ts <- matrix(NA, ncol=5, nrow=nrow(obs.hist.monthly), dimnames=list(NULL, colnames(obs.hist.monthly))) - hd.fut.monthly[, 1:2] <- delta_ts[, 1:2] <- obs.hist.monthly[, 1:2] - hd.fut.monthly[, 1] <- hd.fut.monthly[, 1] + deltaFuture_yr + ppt_fun <- rep("*", 12) + delta_ppts <- hd.fut.monthly[, "PPT_cm"] / obs.hist.monthly[, "PPT_cm"] # equation 7 - #------STEPS 1-4 based on the appendix of Tohver et al. 2014 - for (iv in 1:3) { # for each variable separately: Tmax, Tmin, PPT - # NAs in scenario data: impute with median conditions (TODO(drs): implement a more sophisticated imputation scheme; this one biases variation downwards) - if (anyNA(scen.hist.monthly[, 2 + iv])) { - id_nas <- is.na(scen.hist.monthly[, 2 + iv]) - scen.hist.monthly[id_nas, 2 + iv] <- median(scen.hist.monthly[, 2 + iv], na.rm=TRUE) - } - if (anyNA(scen.fut.monthly[, 2 + iv])) { - id_nas <- is.na(scen.fut.monthly[, 2 + iv]) - scen.fut.monthly[id_nas, 2 + iv] <- median(scen.fut.monthly[, 2 + iv], na.rm=TRUE) - } + temp_add <- is.infinite(delta_ppts) | is.nan(delta_ppts) | + delta_ppts > opt_DS[["PPTratioCutoff"]] | + delta_ppts < 1 / opt_DS[["PPTratioCutoff"]] - #---STEP 1: Statistical bias correction of GCM data - # 1st part of this step is NOT carried out here because our GCM data is already BCSD downscaled: "first aggregating the gridded T and P observations to the GCM grid scale (at the time of this writing typically about 200km resolution)" + if (any(temp_add)) { + ids_m <- unique(delta_ts[temp_add, "Month"]) + ppt_fun[ids_m] <- "+" + temp_m <- delta_ts[, "Month"] %in% ids_m # all calendar month for which at least one instance qualifies for additive PPT + delta_ppts[temp_m] <- hd.fut.monthly[temp_m, "PPT_cm"] - obs.hist.monthly[temp_m, "PPT_cm"] + } + delta_ts[, "PPT_cm"] <- delta_ppts - # fit quantile map based on training data of 
same historic time period - qm_fit <- qmap::fitQmapQUANT.default(obs=obs.hist.monthly[, 2 + iv], mod=scen.hist.monthly[, 2 + iv], qstep=qstep, nboot=nboot, wet.day=FALSE) + # Apply deltas to historic daily weather + # Note: PPT differs from call to call to applyDeltas() because of controlExtremePPTevents (if dailyPPTceiling > 0) + applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, years = tp$years, + delta_ts, ppt_fun, ppt_type = opt_DS[["ppt_type"]], dailyPPTceiling, + sigmaN = opt_DS[["sigmaN"]], do_checks = do_checks) + }) - # 2nd part: bias correcting historic data ("then using quantile mapping techniques to remove the systematic bias in the GCM simulations relative to the observed probability distributions") - temp <- try(doQmapQUANT_drs(x=scen.hist.monthly[, 2 + iv], fobj=qm_fit, type=downscaling.options[["extrapol_type"]], montly_obs_base=obs.hist.monthly[, 2 + iv], monthly_extremes=monthly_extremes[[iv]], correctSplineFun_type=downscaling.options[["correctSplineFun_type"]]), silent = TRUE) - if (inherits(temp, "try-error")) return(temp) - sbc.hist.monthly[, 2 + iv] <- temp - # 3rd part: bias correcting future data ("the same quantile map between simulations and observations is used to transform the future simulations from the GCM") - temp <- try(doQmapQUANT_drs(x=scen.fut.monthly[, 2 + iv], fobj=qm_fit, type=downscaling.options[["extrapol_type"]], montly_obs_base=obs.hist.monthly[, 2 + iv], monthly_extremes=monthly_extremes[[iv]], correctSplineFun_type=downscaling.options[["correctSplineFun_type"]]), silent = TRUE) - if (inherits(temp, "try-error")) return(temp) - sbc.fut.monthly[, 2 + iv] <- temp + #-------DB access functions - #---STEP 2: Spatial downscaling - # - "the monthly T and P values at the GCM grid scale are interpolated to the fine scale grid" - # -> not done here because spatial aggregation (step 1, 1st part) not carried out + #' Converts precipitation data to values in cm / month + convert_precipitation <- 
compiler::cmpfun(function(x, unit_conv, dpm) { + if (unit_conv %in% c("mm/month", "mm month-1")) { + x <- x / 10 - for (im in 1:12) { # for each month separately - #---STEP 3: Remapping the Historical Record to Interpolated GCM data - id_sim_months <- obs.hist.monthly[, "Month"] == im #identical(obs.hist.monthly[, 2], hd.fut.monthly[, 2]) + } else if (unit_conv %in% c("mm/d", "mm d-1")) { + x <- x * dpm / 10 - qm_fitm <- qmap::fitQmapQUANT.default(obs=sbc.fut.monthly[sbc.fut.monthly[, 2] == im, 2 + iv], mod=obs.hist.monthly[id_sim_months, 2 + iv], qstep=qstep, nboot=nboot, wet.day=FALSE) - temp <- try(doQmapQUANT_drs(x=obs.hist.monthly[id_sim_months, 2 + iv], fobj=qm_fitm, type=downscaling.options[["extrapol_type"]], montly_obs_base=obs.hist.monthly[, 2 + iv], monthly_extremes=monthly_extremes[[iv]], correctSplineFun_type=downscaling.options[["correctSplineFun_type"]]), silent = TRUE) - if (inherits(temp, "try-error")) return(temp) - hd.fut.monthly[id_sim_months, 2 + iv] <- temp - } - } + } else if (unit_conv %in% c("kg/m2/s", "kg m-2 s-1", "mm/s", "mm s-1")) { + x <- x * dpm * 8640 - #---STEP 4: Daily Time Step Disaggregation of Monthly Data - delta_ts[, c("Tmax_C", "Tmin_C")] <- hd.fut.monthly[, c("Tmax_C", "Tmin_C")] - obs.hist.monthly[, c("Tmax_C", "Tmin_C")] # equation 8 + } else if (unit_conv %in% c("cm/month", "cm month-1")) { - ppt_fun <- rep("*", 12) - delta_ppts <- hd.fut.monthly[, "PPT_cm"] / obs.hist.monthly[, "PPT_cm"] # equation 7 - temp_add <- is.infinite(delta_ppts) | is.nan(delta_ppts) | delta_ppts > downscaling.options[["PPTratioCutoff"]] | delta_ppts < 1 / downscaling.options[["PPTratioCutoff"]] - if (any(temp_add)) { - ids_m <- unique(delta_ts[temp_add, "Month"]) - ppt_fun[ids_m] <- "+" - temp_m <- delta_ts[, "Month"] %in% ids_m # all calendar month for which at least one instance qualifies for additive PPT - delta_ppts[temp_m] <- hd.fut.monthly[temp_m, "PPT_cm"] - obs.hist.monthly[temp_m, "PPT_cm"] - } - delta_ts[, "PPT_cm"] <- delta_ppts + } 
else stop("Unknown precipitation unit: ", unit_conv) - # Apply deltas to historic daily weather - # Note: PPT differs from call to call to applyDeltas() because of controlExtremePPTevents (if dailyPPTceiling > 0) - try(applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, - years = tp$years, delta_ts, ppt_fun, ppt_type = downscaling.options[["applyPPT_type"]], dailyPPTceiling, sigmaN = downscaling.options[["sigmaN"]], do_checks = do_checks), silent = FALSE) - }) + x + }) + #' Converts temperature data to values in degree Celsius + convert_temperature <- compiler::cmpfun(function(x, unit_conv) { + if (unit_conv == "K") { + x <- x - 273.15 + } else if (unit_conv == "F") { + x <- (x - 32) * 0.5555556 - #-------DB access functions - if (exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { + } else if (unit_conv == "C") { + + } else stop("Unknown temperature unit: ", unit_conv[1]) + + x + }) - mmPerSecond_to_cmPerMonth <- compiler::cmpfun(function(prcp_mmPerSecond, dpm) { - prcp_mmPerSecond * dpm * 8640 # 24 * 60 * 60 / 10 == 8640 - }) - get.request <- compiler::cmpfun(function(service, request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp, useRCurl, saveNEXtempfiles) { - if (useRCurl && !saveNEXtempfiles) { - success <- try(getURL(request, .opts=list(timeout=5*60, connecttimeout=60)), silent=TRUE) + if (any(exinfo$which_NEX)) { + + get.request <- compiler::cmpfun(function(service, request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp) { + if (requireNamespace("RCurl")) { + success <- try(RCurl::getURL(request, .opts=list(timeout=5*60, connecttimeout=60))) if (!inherits(success, "try-error")) { - if (isTRUE(grepl("Not Found", success, ignore.case=TRUE))) { + if (isTRUE(grepl("Not Found", success, ignore.case = TRUE))) { class(success) <- "try-error" } else { if (service == "ncss") { @@ -1614,7 +1855,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } else { if (service == 
"opendap") stop("Curl must be present to access NEX-DCP30 data via thredds/dodsC (opendap)") ftemp <- file.path(dir.out.temp, paste0("NEX_", gcm, "_", scen, "_", variable, "_", round(lat, 5), "&", round(lon, 5), ".csv")) - success <- try(download.file(url=request, destfile=ftemp, quiet=TRUE), silent=TRUE) + success <- try(download.file(url=request, destfile=ftemp, quiet=TRUE)) } yearsN <- endyear - startyear + 1 @@ -1627,7 +1868,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } else if (service == "opendap") { vtemp <- read.csv(ftemp, colClasses=c("NULL", "numeric"), header=FALSE)[-1, ] #columns = Index, Variable } - if (!saveNEXtempfiles && !useRCurl && file.exists(ftemp)) unlink(ftemp) + if (file.exists(ftemp)) unlink(ftemp) if (length(vtemp) < 12*yearsN) { #some GCMs only have values up to Nov 2099 tempYearMonth <- paste(ttemp$year + 1900, ttemp$mo + 1, sep="_") targetYearMonth <- paste(rep(startyear:endyear, each=12), rep(1:12, times=yearsN), sep="_") @@ -1644,7 +1885,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U }) - get.DBvariable <- compiler::cmpfun(function(i, variable, scen, gcm, lon, lat, bbox, startyear, endyear, dir.out.temp, useRCurl, saveNEXtempfiles) { + get.DBvariable <- compiler::cmpfun(function(i, variable, scen, gcm, lon, lat, bbox, tbox, startyear, endyear, dir.out.temp) { gcmrun <- "r1i1p1" #1st attempt: TRHEDDS ncss/netCDF subsetting service request <- paste0(paste("http://dataserver.nccs.nasa.gov", "thredds/ncss/grid/bypass/NEX-DCP30/bcsd", scen, gcmrun, @@ -1652,24 +1893,24 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U "&latitude=", lat, "&longitude=", ifelse(lon > 180, lon - 360, lon), paste0("&time_start=", startyear, "-01-01T00%3A00%3A00Z&time_end=", endyear, "-12-31T23%3A59%3A59Z&timeStride=1"), "&accept=csv") - dat <- get.request(service="ncss", request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp, 
useRCurl, saveNEXtempfiles) + dat <- get.request(service="ncss", request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp) if (inherits(dat, "try-error") || any(dat > 1e5 | dat < -1e5, na.rm=TRUE)) { #thredds/ncss/ returns for some GCMs/RCPs/locations unrealistic large values, e.g., 9.969210e+36 and sometimes 2.670153e+42 for pr, tasmin, and tasmax for the month of May in every fifth year (2071, 2076, ...): bug report to NASA NCCS Support Team on June 2, 2014 - confirmed on June 8, 2014 by Yingshuo Shen (issue=48932) #2nd attempt: TRHEDDS opendap/dodsC lat.index <- round((lat - bbox$lat[1]) / 0.0083333333, 0) lon.index <- round((lon - bbox$lon[1]) / 0.0083333333, 0) if (startyear < 2006 && scen == "historical") { - index.time.start <- (startyear - 1950) * 12 - index.time.end <- (endyear + 1 - 1950) * 12 - 1 + index.time.start <- (startyear - tbox["start", "first"]) * 12 + index.time.end <- (endyear + 1 - tbox["start", "first"]) * 12 - 1 } else { - index.time.start <- (startyear - 2006) * 12 - index.time.end <- (endyear + 1 - 2006) * 12 - 1 + index.time.start <- (startyear - tbox["start", "second"]) * 12 + index.time.end <- (endyear + 1 - tbox["start", "second"]) * 12 - 1 } request <- paste0(paste("http://dataserver.nccs.nasa.gov", "thredds/dodsC/bypass/NEX-DCP30/bcsd", scen, gcmrun, paste0(gcm, "_", variable, ".ncml.ascii"), sep="/"), "?lat[", lat.index, "],lon[", lon.index, "],", gcm, "_", variable, "[", index.time.start, ":1:", index.time.end, "][", lat.index, "][", lon.index, "]") - dat <- get.request(service="opendap", request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp, useRCurl, saveNEXtempfiles) + dat <- get.request(service="opendap", request, i, variable, scen, gcm, lon, lat, startyear, endyear, dir.out.temp) stopifnot(!inherits(dat, "try-error"), all(dat < 1e5 & dat > -1e5, na.rm=TRUE)) } @@ -1679,43 +1920,56 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @return A list of 
one data.frame object with 5 columns and names of #' "year", "month", "tmax", "tmin", and "prcp". Each row is one day. #' Units are [degree Celsius] for temperature and [cm / day] and [cm / month], respectively, for precipitation. - get_GCMdata <- compiler::cmpfun(function(i, ts_mons, dpm, gcm, scen, lon, lat, startyear, endyear, varTags, ...) { - dots <- list(...) # bbox, dir.out.temp, useRCurl, saveNEXtempfiles + get_GCMdata <- compiler::cmpfun(function(i, ts_mons, dpm, gcm, scen, lon, lat, startyear, endyear, climDB_meta, ...) { + dots <- list(...) # dir.out.temp + + n_var <- 3 + clim <- vector("list", length = n_var) + names(clim) <- row.names(climDB_meta[["var_desc"]])[seq_len(n_var)] - clim <- vector("list", length=3) - names(clim) <- varTags + for (iv in seq_len(n_var)) { + var_tag <- climDB_meta[["var_desc"]][iv, "tag"] + unit_conv <- climDB_meta[["var_desc"]][iv, "unit_real"] - for (iv in seq_along(varTags)) { #Extract data - clim[[iv]] <- get.DBvariable(i, variable=varTags[iv], scen=scen, gcm=gcm, lon=lon, lat=lat, bbox = dots[["bbox"]], startyear=startyear, endyear=endyear, dir.out.temp = dots[["dir.out.temp"]], useRCurl = dots[["useRCurl"]], saveNEXtempfiles = dots[["saveNEXtempfiles"]]) + clim[[iv]] <- get.DBvariable(i, variable = var_tag, + scen = scen, gcm = gcm, lon = lon, lat = lat, + bbox = climDB_meta[["bbox"]], tbox = climDB_meta[["tbox"]], + startyear = startyear, endyear = endyear, dir.out.temp = dots[["dir.out.temp"]]) + #Adjust units - if (varTags[iv] == "pr") {#convert kg/m2/s -> cm/month - clim[[iv]] <- mmPerSecond_to_cmPerMonth(prcp_mmPerSecond=clim[[iv]], dpm) - } else if (grepl("tas", varTags[iv])) { #convert K -> C - clim[[iv]] <- clim[[iv]] - 273.15 + if (var_tag == "pr") { + clim[[iv]] <- convert_precipitation(clim[[iv]], unit_conv, dpm) + + } else if (grepl("tas", var_tag, ignore.case = TRUE)) { + clim[[iv]] <- convert_temperature(clim[[iv]], unit_conv) } } - #Monthly weather time-series - list(cbind(year=ts_mons$year + 1900, 
month=ts_mons$mon + 1, tmax = clim[["tasmax"]], tmin = clim[["tasmin"]], prcp = clim[["pr"]])) + #Monthly weather time-series (var names as in 'var_names_fixed') + list(cbind(year = ts_mons$year + 1900, + month = ts_mons$mon + 1, + tmax = clim[["tmax"]], tmin = clim[["tmin"]], prcp = clim[["prcp"]])) }) } - if (exinfo$GDODCPUCLLNL) { + if (any(exinfo$which_netCDF)) { whereNearest <- compiler::cmpfun(function(val, matrix) { #this returns the index of the closest value in the matrix to the passed in value. which.min(abs(matrix - val)) }) get.SpatialIndices <- compiler::cmpfun(function(filename, lon, lat) { - nc <- nc_open(filename=filename, write=FALSE, readunlim=TRUE, verbose=FALSE) + stopifnot(requireNamespace("ncdf4")) + + nc <- ncdf4::nc_open(filename=filename, write=FALSE, readunlim=TRUE, verbose=FALSE) #Get latitudes/longitudes from the netCDF files...; they are the same for each CMIP x extent # - these are used to get the correct indices in the whereNearest function lats <- nc$dim$lat$vals lons <- nc$dim$lon$vals #close the netCDF file - nc_close(nc) + ncdf4::nc_close(nc) if (any(lons > 180)) lons <- ifelse(lons > 180, lons - 360, lons) #Calculate the spatial indices @@ -1727,27 +1981,80 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U }) get.TimeIndices <- compiler::cmpfun(function(filename, startyear, endyear) { - nc <- nc_open(filename=filename, write=FALSE, readunlim=TRUE, verbose=FALSE) + stopifnot(requireNamespace("ncdf4")) + + nc <- ncdf4::nc_open(filename=filename, write=FALSE, readunlim=TRUE, verbose=FALSE) utemp <- nc$dim$time$units tvals <- nc$dim$time$vals - nc_close(nc) + calendar <- nc$dim$time$calendar + ncdf4::nc_close(nc) N <- length(tvals) - temp <- lapply(strsplit(utemp, split=" ", fixed=TRUE)[[1]], function(x) as.Date(x, format="%Y-%m-%d")) - baseYear <- temp[sapply(temp, function(x) !is.na(x))][[1]] - stopifnot(length(baseYear) == 1) - - firstDate <- as.POSIXlt(baseYear + tvals[1], tz = "UTC") - lastDate 
<- as.POSIXlt(baseYear + tvals[N], tz = "UTC") - - startYear <- firstDate$year + 1900 - startMonth <- firstDate$mon + 1 - endYear <- lastDate$year + 1900 - endMonth <- lastDate$mon + 1 - - stopifnot(startYear <= startyear || (startMonth == 1 && startYear == startyear)) #we only extract full years and require data from the startyear on - timeStartIndex <- (startyear - startYear) * 12 + 2 - startMonth #we extract beginning with January of startyear + # Start of time axis + utemp <- strsplit(utemp, split = " ", fixed = TRUE)[[1]] + temp <- lapply(utemp, function(x) as.Date(x, format = "%Y-%m-%d")) + tbase <- temp[sapply(temp, function(x) !is.na(x))][[1]] # class 'Date' = # of days since Jan 1, 1970 in Gregorian calendar + stopifnot(length(tbase) == 1) + + tunit <- utemp[1] + # http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#time-coordinate + tunit <- if (grepl("(day)|(d)", tunit, ignore.case = TRUE)) { + 1 + } else if (grepl("(hour)|(h)", tunit, ignore.case = TRUE)) { + 24 + } else if (grepl("(minute)|(min)", tunit, ignore.case = TRUE)) { + 1440 + } else if (grepl("(second)|(sec)", tunit, ignore.case = TRUE) || "s" == tunit) { + 86400 + } else stop("time unit of netCDF not recognized") + + # http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#calendar + cdays <- switch(calendar, + noleap = 365, `365_day` = 365, `all_leap` = 366, `366_day` = 366, `360_day` = 360, + -1) + + if (calendar == "proleptic_gregorian" || calendar == "gregorian" || + calendar == "standard" || is.null(calendar)) { + + temp <- as.POSIXlt(tbase + tvals[1] / tunit, tz = "UTC") + start <- c(year = temp$year + 1900, month = temp$mon + 1) + + temp <- as.POSIXlt(tbase + tvals[N] / tunit, tz = "UTC") + end <- c(year = temp$year + 1900, month = temp$mon + 1) + + } else if (cdays > 0) { + # all years are of a constant fixed duration + temp <- tvals[c(1, N)] / tunit + to_add_years <- temp %/% cdays + to_add_days <- temp %% cdays + + if (cdays > 360) { + temp <- 
as.POSIXlt(tbase, tz = "UTC") + temp_start <- strptime(paste(temp$year + 1900 + to_add_years[1], + to_add_days[1], sep = "-"), format = "%Y-%j", tz = "UTC") + temp_end <- strptime(paste(temp$year + 1900 + to_add_years[2], + to_add_days[2], sep = "-"), format = "%Y-%j", tz = "UTC") + + start <- c(year = temp_start$year + 1900, month = temp_start$mon + 1) + end <- c(year = temp_end$year + 1900, month = temp_end$mon + 1) + + } else if (cdays == 360) { + # all years are 360 days divided into 30 day months + to_add_months <- floor(to_add_days / 30) + + temp <- as.POSIXlt(tbase, tz = "UTC") + start <- c(year = temp$year + 1900 + to_add_years[1], + month = temp$mon + 1 + to_add_months[1]) + end <- c(year = temp$year + 1900 + to_add_years[2], + month = temp$mon + 1 + to_add_months[2]) + } + + } else stop("calendar of netCDF not recognized") + + + stopifnot(start["year"] <= startyear || (start["month"] == 1 && start["year"] == startyear)) #we only extract full years and require data from the start["year"] on + timeStartIndex <- (startyear - start["year"]) * 12 + 2 - start["month"] #we extract beginning with January of start["year"] #account for missing months: assume all are at the end; e.g., precipitation of 'HadGEM2-ES' has values only until Nov 2099 instead Dec 2100 timeCount_should <- (endyear - startyear + 1) * 12 #timeCount must include a count at timeStartIndex; to extract two values at 1:2, have timeStartIndex=1 and timeCount=2 @@ -1766,41 +2073,51 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U }) do_ncvar_get <- compiler::cmpfun(function(nc, nc_perm, variable, ncg, nct) { + stopifnot(requireNamespace("ncdf4")) + index <- which("time" == nc_perm) - if (index == 3L) { #if file is in order of (lat, lon, time) - ncvar_get(nc, variable, start=c(ncg$ix, ncg$iy, nct$timeStartIndex), count=c(1, 1, nct$timeCount)) - } else if (index == 1L) { #if file is optimized for time series extraction and permutated to order (time, lat, lon) - 
ncvar_get(nc, variable, start=c(nct$timeStartIndex, ncg$ix, ncg$iy), count=c(nct$timeCount, 1, 1)) + if (index == 3L) { + # if file is in order of (lat, lon, time) + ncdf4::ncvar_get(nc, variable, start = c(ncg$ix, ncg$iy, nct$timeStartIndex), + count = c(1, 1, nct$timeCount)) + + } else if (index == 1L) { + # if file is optimized for time series extraction and permutated to order (time, lat, lon) + ncdf4::ncvar_get(nc, variable, start = c(nct$timeStartIndex, ncg$ix, ncg$iy), + count = c(nct$timeCount, 1, 1)) + } else { stop("do_ncvar_get: dimension 'time' must be either in first or third place, but is instead at ", index) } }) - mmPerDay_to_cmPerMonth <- compiler::cmpfun(function(prcp_mmPerDay, dpm) { - prcp_mmPerDay / 10 * dpm - }) - get.DBvariable <- compiler::cmpfun(function(filepath, variable, unit, ncg, nct, lon, lat, startyear, endyear) { + stopifnot(requireNamespace("ncdf4")) # the 'raster' package (version <= '2.5.2') cannot handle non-equally spaced cells - nc <- nc_open(filename = filepath, write = FALSE, readunlim = TRUE, verbose = FALSE) - stopifnot(grepl(unit, nc$var[[variable]]$units, fixed = TRUE)) + nc <- ncdf4::nc_open(filename = filepath, write = FALSE, readunlim = TRUE, verbose = FALSE) + + stopifnot(isTRUE(tolower(unit) == tolower(nc$var[[variable]]$units))) # getting the values from the netCDF files... 
nc_perm <- sapply(nc$var[[variable]]$dim, function(x) x$name) - res <- try(do_ncvar_get(nc, nc_perm, variable, ncg, nct), silent = TRUE) - if (inherits(res, "try-error")) { #in case of 'HadGEM2-ES x RCP45' where pr and tasmax/tasmin have different timings + res <- try(do_ncvar_get(nc, nc_perm, variable, ncg, nct)) + if (inherits(res, "try-error")) { + # in case of 'HadGEM2-ES x RCP45' where pr and tasmax/tasmin have different timings ncg <- get.SpatialIndices(filename = filepath, lon, lat) nct <- get.TimeIndices(filename = filepath, startyear, endyear) - res <- try(do_ncvar_get(nc, nc_perm, variable, ncg, nct), silent = TRUE) + res <- do_ncvar_get(nc, nc_perm, variable, ncg, nct) } - nc_close(nc) #close the netCDF file + ncdf4::nc_close(nc) #close the netCDF file #adjust for missing months - if (nct$addMissingMonthAtEnd > 0) res <- c(res, rep(NA, times = nct$addMissingMonthAtEnd)) + if (nct$addMissingMonthAtEnd > 0) + res <- c(res, rep(NA, times = nct$addMissingMonthAtEnd)) - #no data: most likely cell location is not terrestrial - if (all(is.na(res))) stop("get.DBvariable: all data are NAs; cell location (", round(lon, 5), ", ", round(lat, 5), ") is likely not terrestrial") + if (all(is.na(res)) || inherits(res, "try-error")) + stop("get.DBvariable at (", round(lon, 5), ", ", round(lat, 5), "): ", + "extraction failed or no data available. Error message: ", + paste(head(res), collapse = "/")) res }) @@ -1809,291 +2126,347 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' @return A list of one data.frame object with 5 columns and names of #' "year", "month", "tmax", "tmin", and "prcp". Each row is one day. #' Units are [degree Celsius] for temperature and [cm / day] and [cm / month], respectively, for precipitation. - get_GCMdata <- compiler::cmpfun(function(i, ts_mons, dpm, gcm, scen, lon, lat, startyear, endyear, varTags, ...) { - dots <- list(...) 
# fileVarTags, ncFiles, ncg, nct + get_GCMdata <- compiler::cmpfun(function(i, ts_mons, dpm, gcm, scen, lon, lat, startyear, endyear, climDB_meta, ...) { + dots <- list(...) # ncFiles, ncg, nct + + # Extract precipitation data + temp1 <- grepl(climDB_meta[["var_desc"]]["prcp", "fileVarTags"], + dots[["ncFiles"]], ignore.case = TRUE) - #Get precipitation data - temp1 <- grepl(dots[["fileVarTags"]]["prcp"], dots[["ncFiles"]]) if (any(temp1)) { - prcp <- get.DBvariable(filepath = dots[["ncFiles"]][temp1][1], variable = varTags["prcp"], unit = "mm/d", ncg = dots[["ncg"]], nct = dots[["ncg"]], lon = lon, lat = lat, startyear = startyear, endyear = endyear) - prcp <- mmPerDay_to_cmPerMonth(prcp, dpm) - } else { - stop("No suitable netCDF file with precipitation data found for ", i, gcm, scen, varTags["prcp"]) + prcp <- get.DBvariable(filepath = dots[["ncFiles"]][temp1][1], + variable = climDB_meta[["var_desc"]]["prcp", "tag"], + unit = climDB_meta[["var_desc"]]["prcp", "unit_given"], + ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, + startyear = startyear, endyear = endyear) + + } else { + stop("No suitable netCDF file with precipitation data found for ", i, gcm, scen) } - #Get temperature data - temp3 <- grepl(dots[["fileVarTags"]]["tmin"], dots[["ncFiles"]]) - temp4 <- grepl(dots[["fileVarTags"]]["tmax"], dots[["ncFiles"]]) + + # Extract temperature data + temp3 <- grepl(climDB_meta[["var_desc"]]["tmin", "fileVarTags"], + dots[["ncFiles"]], ignore.case = TRUE) + temp4 <- grepl(climDB_meta[["var_desc"]]["tmax", "fileVarTags"], + dots[["ncFiles"]], ignore.case = TRUE) + if (any(temp3) && any(temp4)) { - tmin <- get.DBvariable(filepath = dots[["ncFiles"]][temp3][1], variable = varTags["tmin"], unit = "C", ncg = dots[["ncg"]], nct = dots[["ncg"]], lon = lon, lat = lat, startyear = startyear, endyear = endyear) - tmax <- get.DBvariable(filepath = dots[["ncFiles"]][temp4][1], variable = varTags["tmax"], unit = "C", ncg = dots[["ncg"]], nct = dots[["ncg"]], 
lon = lon, lat = lat, startyear = startyear, endyear = endyear) + tmin <- get.DBvariable(filepath = dots[["ncFiles"]][temp3][1], + variable = climDB_meta[["var_desc"]]["tmin", "tag"], + unit = climDB_meta[["var_desc"]]["tmin", "unit_given"], + ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, + startyear = startyear, endyear = endyear) + + tmax <- get.DBvariable(filepath = dots[["ncFiles"]][temp4][1], + variable = climDB_meta[["var_desc"]]["tmax", "tag"], + unit = climDB_meta[["var_desc"]]["tmax", "unit_given"], + ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, + startyear = startyear, endyear = endyear) + } else { - temp2 <- grepl(dots[["fileVarTags"]]["tmean"], dots[["ncFiles"]]) + temp2 <- grepl(climDB_meta[["var_desc"]]["tmean", "fileVarTags"], + dots[["ncFiles"]], ignore.case = TRUE) + if (any(temp2)) { - tmean <- get.DBvariable(filepath = dots[["ncFiles"]][temp2][1], variable = varTags["tmean"], unit = "C", ncg = dots[["ncg"]], nct = dots[["ncg"]], lon = lon, lat = lat, startyear = startyear, endyear = endyear) + tmean <- get.DBvariable(filepath = dots[["ncFiles"]][temp2][1], + variable = climDB_meta[["var_desc"]]["tmean", "tag"], + unit = climDB_meta[["var_desc"]]["tmean", "unit_given"], + ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, + startyear = startyear, endyear = endyear) tmin <- tmax <- tmean + } else { - stop("No suitable netCDF file with temperature data found for ", i, gcm, scen, varTags["prcp"]) + stop("No suitable netCDF file with temperature data found for ", i, gcm, scen) } } - list(cbind(year = ts_mons$year + 1900, month = ts_mons$mon + 1, tmax = tmax, tmin = tmin, prcp = prcp)) + # Convert units + unit_conv <- climDB_meta[["var_desc"]]["prcp", "unit_real"] + prcp <- convert_precipitation(prcp, unit_conv, dpm) + + unit_conv <- climDB_meta[["var_desc"]][c("tmin", "tmax", "tmean"), "unit_real"] + stopifnot(unit_conv[1] == unit_conv[2], unit_conv[1] == unit_conv[3]) + tmin <- 
convert_temperature(tmin, unit_conv[1]) + tmax <- convert_temperature(tmax, unit_conv[1]) + + list(cbind(year = ts_mons$year + 1900, + month = ts_mons$mon + 1, + tmax = tmax, tmin = tmin, prcp = prcp)) }) } #----Extraction function - calc.ScenarioWeather <- compiler::cmpfun(function(i, GCM_source, is_GDODCPUCLLNL, is_NEX, gcmFiles, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, bbox, locations, dbW_iSiteTable, compression_type, varTags, fileVarTags, getYears, assocYears, future_yrs, simstartyr, endyr, DScur_startyr, DScur_endyr, downscaling.options, dir.out.temp, be.quiet, useRCurl, saveNEXtempfiles) { - #Identify index for site and scenario - ig <- (i - 1) %% length(reqGCMs) + 1 - il <- (i - 1) %/% length(reqGCMs) + 1 - - gcm <- reqGCMs[ig] - rcps <- reqRCPsPerGCM[[ig]] - downs <- reqDownscalingsPerGCM[[ig]] - lon <- locations[il, "X_WGS84"] - lat <- locations[il, "Y_WGS84"] - site_id <- locations[il, "site_id"] + calc.ScenarioWeather <- compiler::cmpfun(function(i, clim_source, is_netCDF, is_NEX, + climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, + reqDownscalingsPerGCM, climate.ambient, locations, + dbW_iSiteTable, compression_type, getYears, assocYears, + future_yrs, simstartyr, endyr, DScur_startyr, DScur_endyr, + opt_DS, dir.out.temp, be.quiet, print.debug) { + + on.exit({save(list = ls(), + file = file.path(dir.out.temp, paste0("ClimScen_failed_", i, "_l2.RData")))}) + + #Identify index for site and scenario + ig <- (i - 1) %% length(reqGCMs) + 1 + il <- (i - 1) %/% length(reqGCMs) + 1 + + gcm <- reqGCMs[ig] + rcps <- reqRCPsPerGCM[[ig]] + downs <- reqDownscalingsPerGCM[[ig]] + lon <- locations[il, "X_WGS84"] + lat <- locations[il, "Y_WGS84"] + site_id <- locations[il, "site_id"] # site_id <- dbW_iSiteTable[dbW_iSiteTable[, "Label"] == locations[il, "WeatherFolder"], "Site_id"] - ncFiles_gcm <- if (is_GDODCPUCLLNL) gcmFiles[grepl(paste0("_", as.character(gcm), "_"), gcmFiles)] else NULL - if (!be.quiet) print(paste(i, "th extraction of '", 
GCM_source, "' at", Sys.time(), "for", gcm, "(", paste(rcps, collapse=", "), ") at", lon, lat)) - # Data Bounding Box - if (!(lat >= bbox$lat[1] && lat <= bbox$lat[2] && lon >= bbox$lon[1] && lon <= bbox$lon[2])) { - stop(paste(i, "th extraction of '", GCM_source, "' at", Sys.time(), "at", lon, lat, ": outside of bounding data box")) - } + ncFiles_gcm <- if (is_netCDF) { + climDB_files[grepl(paste0(climDB_meta[["sep_fname"]], gcm, climDB_meta[["sep_fname"]]), + climDB_files, ignore.case = TRUE)] + } else NULL + + if (!be.quiet) + print(paste0(i, "-th extraction of ", shQuote(clim_source), " at ", Sys.time(), + " for ", gcm, " (", paste(rcps, collapse = ", "), ") at ", lon, " / ", lat)) + + #Scenario monthly weather time-series: Get GCM data for each scenario and time slice + scen.monthly <- matrix(vector("list", (getYears$n_first + getYears$n_second) * (1 + length(rcps))), + ncol = getYears$n_first+getYears$n_second, + dimnames = list(c("Current", rcps), + c(paste0("first", seq_len(getYears$n_first)), + paste0("second", seq_len(getYears$n_second))))) + if (print.debug) + print(paste0(i, "-th extraction: first slice ('historical'): ", + paste(getYears$first, collapse = "-"))) + + args_extract1 <- list(i = i, gcm = gcm, scen = "historical", lon = lon, lat = lat, + climDB_meta = climDB_meta) + if (is_netCDF) { + ncFiles <- ncFiles_gcm[grepl(args_extract1[["scen"]], ncFiles_gcm, ignore.case = TRUE)] + ncg <- get.SpatialIndices(filename = ncFiles[1], lon = lon, lat = lat) + args_extract1 <- c(args_extract1, ncFiles = list(ncFiles), ncg = list(ncg)) + } - #Scenario monthly weather time-series: Get GCM data for each scenario and time slice - scen.monthly <- matrix(data=vector("list", length=(getYears$n_first+getYears$n_second)*(1 + length(rcps))), ncol=getYears$n_first+getYears$n_second, dimnames=list(c("Current", as.character(rcps)), c(paste0("first", 1:getYears$n_first), paste0("second", 1:getYears$n_second)))) - #First slice ('historical'): 1950-2005 - args_extract1 <- 
list(i = i, gcm = gcm, scen = "historical", lon = lon, lat = lat, varTags = varTags) - if (is_GDODCPUCLLNL) { - ncFiles <- ncFiles_gcm[grepl(args_extract1[["scen"]], ncFiles_gcm)] - ncg <- get.SpatialIndices(filename = ncFiles[1], lon = lon, lat = lat) - } - if (is_GDODCPUCLLNL) { - args_extract1 <- c(args_extract1, fileVarTags = list(fileVarTags), ncFiles = list(ncFiles), ncg = list(ncg)) - } else if (is_NEX) { - args_extract1 <- c(args_extract1, bbox = list(bbox), dir.out.temp = dir.out.temp, useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles) - } - for (it in 1:getYears$n_first) { - args_first <- c(args_extract1, - ts_mons = list(getYears$first_dates[[it]]), - dpm = list(getYears$first_dpm[[it]]), - startyear = getYears$first[it, 1], - endyear = getYears$first[it, 2]) - if (is_GDODCPUCLLNL) { - # Time index: differs among variables from the same GCMxRCP: in only once case: HadGEM2-ES x RCP45 - args_first <- c(args_first, nct = list(get.TimeIndices(filename = ncFiles[1], startyear = getYears$first[it, 1], endyear = getYears$first[it, 2]))) - } - scen.monthly[1, it] <- do.call(get_GCMdata, args = args_first) - } + if (is_NEX) { + args_extract1 <- c(args_extract1, dir.out.temp = dir.out.temp) + } - #Second slice ('future scenarios'): 2006-2099 - for (it in 1:getYears$n_second) { - args_extract2 <- c(args_extract1, - ts_mons = list(getYears$second_dates[[it]]), - dpm = list(getYears$second_dpm[[it]]), - startyear = getYears$second[it, 1], - endyear = getYears$second[it, 2]) - # Time index: differs among variables from the same GCMxRCP: in only once case: HadGEM2-ES x RCP45 - if (is_GDODCPUCLLNL) { - args_extract2[["nct"]] <- get.TimeIndices(filename=ncFiles_gcm[grep(as.character(rcps)[1], ncFiles_gcm)[1]], startyear=getYears$second[it, 1], endyear=getYears$second[it, 2]) - } - for (isc in 2:nrow(scen.monthly)) { - args_second <- args_extract2 - args_second[["scen"]] <- as.character(rcps[isc - 1]) - if (is_GDODCPUCLLNL) { - args_second[["ncFiles"]] <- 
ncFiles_gcm[grepl(args_second[["scen"]], ncFiles_gcm)] - } - scen.monthly[isc, getYears$n_first + it] <- do.call(get_GCMdata, args = args_second) - } - } + for (it in seq_len(getYears$n_first)) { + args_first <- c(args_extract1, + ts_mons = list(getYears$first_dates[[it]]), + dpm = list(getYears$first_dpm[[it]]), + startyear = getYears$first[it, 1], + endyear = getYears$first[it, 2]) + if (is_netCDF) { + # Time index: differs among variables from the same GCMxRCP: in only once case: HadGEM2-ES x RCP45 + args_first <- c(args_first, nct = list( + get.TimeIndices(filename = ncFiles[1], startyear = getYears$first[it, 1], + endyear = getYears$first[it, 2]))) + } + scen.monthly[1, it] <- do.call(get_GCMdata, args = args_first) + } + if (print.debug) + print(paste0(i, "-th extraction: second slice ('future'): ", + paste(getYears$second, collapse = "-"))) + + for (it in seq_len(getYears$n_second)) { + args_extract2 <- c(args_extract1, + ts_mons = list(getYears$second_dates[[it]]), + dpm = list(getYears$second_dpm[[it]]), + startyear = getYears$second[it, 1], + endyear = getYears$second[it, 2]) + + if (is_netCDF) { + # Assume that netCDF file structure is identical among RCPs within a variable + # - differs among variables from the same GCMxRCP: HadGEM2-ES x RCP45 + temp <- ncFiles_gcm[grep(rcps[1], ncFiles_gcm, ignore.case = TRUE)[1]] + args_extract2[["nct"]] <- get.TimeIndices(filename = temp, + startyear = getYears$second[it, 1], endyear = getYears$second[it, 2]) + } + + for (isc in 2:nrow(scen.monthly)) { + args_second <- args_extract2 + args_second[["scen"]] <- rcps[isc - 1] + if (is_netCDF) { + args_second[["ncFiles"]] <- ncFiles_gcm[grepl(args_second[["scen"]], ncFiles_gcm, ignore.case = TRUE)] + } + scen.monthly[isc, getYears$n_first + it] <- do.call(get_GCMdata, args = args_second) + } + } - #Observed historic daily weather from weather database - obs.hist.daily <- Rsoilwat31::dbW_getWeatherData(Site_id=site_id, startYear=simstartyr, endYear=endyr, 
Scenario=climate.ambient) - if (obs.hist.daily[[1]]@year < 1950) { #TODO(drs): I don't know where the hard coded value of 1950 comes from; it doesn't make sense to me - print("Note: subsetting years 'obs.hist.daily' because 'simstartyr < 1950'") - start_yr <- obs.hist.daily[[length(obs.hist.daily)]]@year - 1950 - obs.hist.daily <- obs.hist.daily[(length(obs.hist.daily)-start_yr):length(obs.hist.daily)] - } + #Observed historic daily weather from weather database + if (print.debug) + print(paste0(i, "-th extraction: observed historic daily weather from weather DB: ", + simstartyr, "-", endyr)) - sim_years <- as.integer(names(obs.hist.daily)) - obs.hist.monthly <- dbW_weatherData_to_monthly(dailySW = obs.hist.daily) - - #Hamlet et al. 2010: "an arbitrary ceiling of 150% of the observed maximum precipitation value for each cell is also imposed by ???spreading out??? very large daily precipitation values into one or more adjacent days" - dailyPPTceiling <- downscaling.options[["daily_ppt_limit"]] * max(unlist(lapply(obs.hist.daily, function(obs) max(obs@data[, "PPT_cm"])))) - #Monthly extremes are used to cut the most extreme spline oscillations; these limits are ad hoc; monthly temperature extremes based on expanded daily extremes - temp <- stretch_values(x = range(sapply(obs.hist.daily, function(obs) obs@data[, c("Tmax_C", "Tmin_C")])), lambda = downscaling.options[["monthly_limit"]]) - monthly_extremes <- list(Tmax = temp, Tmin = temp, PPT = c(0, downscaling.options[["monthly_limit"]] * max(tapply(obs.hist.monthly[, "PPT_cm"], obs.hist.monthly[, 1], sum)))) - - - wdataOut <- list() - for (ir in seq_along(rcps)) { #Apply downscaling for each RCP - #Put historical data together - #NOTE: both scen.hist.monthly and scen.fut.monthly may have NAs because some GCMs do not provide data for the last month of a time slice (e.g. 
December 2005 may be NA) - if (!all(downs == "raw")) { - scen.hist.monthly <- NULL - for (itt in which(assocYears[["historical"]]$first)) scen.hist.monthly <- rbind(scen.hist.monthly, scen.monthly[1, itt][[1]]) - for (itt in which(assocYears[["historical"]]$second)) scen.hist.monthly <- rbind(scen.hist.monthly, scen.monthly[1 + ir, getYears$n_first + itt][[1]]) - } - - types <- list() - for (it in 1:nrow(future_yrs)) { - tag <- paste0(rownames(future_yrs)[it], ".", rcps[ir]) - - #Put future data together - scen.fut.monthly <- NULL - for (itt in which(assocYears[[tag]]$first)) scen.fut.monthly <- rbind(scen.fut.monthly, scen.monthly[1, itt][[1]]) - for (itt in which(assocYears[[tag]]$second)) scen.fut.monthly <- rbind(scen.fut.monthly, scen.monthly[1 + ir, getYears$n_first + itt][[1]]) - - # Comment: The variables are expected to cover the following time periods - # 'obs.hist.daily' = simstartyr:endyr - # 'obs.hist.monthly' = simstartyr:endyr - # 'scen.hist.monthly' = DScur_startyr:DScur_endyr - # 'scen.fut.monthly' = DSfut_startyr:DSfut_endyr - # 'scen.fut.daily' will cover: delta + simstartyr:endyr - # Units are [degree Celsius] for temperature and [cm / day] and [cm / month], respectively, for precipitation - - #Apply downscaling - if ("raw" %in% downs) { - scenario_id <- dbW_iScenarioTable[dbW_iScenarioTable[, "Scenario"] == tolower(paste("raw", tag, gcm, sep=".")), "id"] - scen.fut.daily <- downscale.raw(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = TRUE) - - if (inherits(scen.fut.daily, "try-error")) {#raw unsuccessful, replace with raw without checks - scen.fut.daily <- downscale.raw(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = 
DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = FALSE) - stopifnot(!inherits(scen.fut.daily, "try-error")) - print(paste0(i, ", site_id = ", site_id, ", scenario_id = ", scenario_id, ", ", tolower(paste(tag, gcm, sep=".")), ", timeslice = ", rownames(future_yrs)[it], ": raw method: checks turned off for monthly->daily")) - } - data_blob <- dbW_weatherData_to_blob(scen.fut.daily, compression_type) - years <- as.integer(names(scen.fut.daily)) - types[[length(types)+1]] <- list(Site_id=site_id, Scenario_id=scenario_id, StartYear=years[1], EndYear=years[length(years)], weatherData=data_blob) - } + obs.hist.daily <- Rsoilwat31::dbW_getWeatherData(Site_id = site_id, + startYear = simstartyr, endYear = endyr, Scenario = climate.ambient) - if ("delta" %in% downs) { - scenario_id <- dbW_iScenarioTable[dbW_iScenarioTable[, "Scenario"] == tolower(paste("delta", tag, gcm, sep=".")), "id"] - scen.fut.daily <- downscale.delta(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = TRUE) - if (inherits(scen.fut.daily, "try-error")) {#delta unsuccessful, replace with delta without checks - scen.fut.daily <- downscale.delta(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = FALSE) - 
stopifnot(!inherits(scen.fut.daily, "try-error")) - print(paste0(i, ", site_id = ", site_id, ", scenario_id = ", scenario_id, ", ", tolower(paste(tag, gcm, sep=".")), ", timeslice = ", rownames(future_yrs)[it], ": delta method: checks turned off for monthly->daily")) - } - data_blob <- dbW_weatherData_to_blob(scen.fut.daily, compression_type) - years <- as.integer(names(scen.fut.daily)) - types[[length(types)+1]] <- list(Site_id=site_id, Scenario_id=scenario_id, StartYear=years[1], EndYear=years[length(years)], weatherData=data_blob) - } + if (obs.hist.daily[[1]]@year < 1950) { #TODO(drs): I don't know where the hard coded value of 1950 comes from; it doesn't make sense to me + print("Note: subsetting years 'obs.hist.daily' because 'simstartyr < 1950'") + start_yr <- obs.hist.daily[[length(obs.hist.daily)]]@year - 1950 + obs.hist.daily <- obs.hist.daily[(length(obs.hist.daily)-start_yr):length(obs.hist.daily)] + } - if ("hybrid-delta" %in% downs) { - scenario_id <- dbW_iScenarioTable[dbW_iScenarioTable[, "Scenario"] == tolower(paste("hybrid-delta", tag, gcm, sep=".")), "id"] - scen.fut.daily <- downscale.deltahybrid(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - do_checks = TRUE) - if (inherits(scen.fut.daily, "try-error")) {#delta-hybrid unsuccessful, replace with delta method - scen.fut.daily <- downscale.delta(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = FALSE) - stopifnot(!inherits(scen.fut.daily, 
"try-error")) - print(paste0(i, ", site_id = ", site_id, ", scenario_id = ", scenario_id, ", ", tolower(paste(tag, gcm, sep=".")), ", timeslice = ", rownames(future_yrs)[it], ": delta-hybrid replaced by delta method for monthly->daily")) - } - data_blob <- dbW_weatherData_to_blob(scen.fut.daily, compression_type) - years <- as.integer(names(scen.fut.daily)) - types[[length(types)+1]] <- list(Site_id=site_id, Scenario_id=scenario_id, StartYear=years[1], EndYear=years[length(years)], weatherData=data_blob) - } + sim_years <- as.integer(names(obs.hist.daily)) + obs.hist.monthly <- dbW_weatherData_to_monthly(dailySW = obs.hist.daily) - if ("hybrid-delta-3mod" %in% downs) { - scenario_id <- dbW_iScenarioTable[dbW_iScenarioTable[, "Scenario"] == tolower(paste("hybrid-delta-3mod", tag, gcm, sep=".")), "id"] - - scen.fut.daily <- downscale.deltahybrid3mod(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - deltaFuture_yr = future_yrs[it, "delta"], years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - monthly_extremes = monthly_extremes, - do_checks = TRUE) - - if (inherits(scen.fut.daily, "try-error")) {#delta-hybrid-3mod unsuccessful, replace with delta method - scen.fut.daily <- downscale.delta(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years = sim_years, - DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, - DSfut_startyear = future_yrs[it, "DSfut_startyr"], DSfut_endyear = future_yrs[it, "DSfut_endyr"], - downscaling.options = downscaling.options, - dailyPPTceiling = dailyPPTceiling, - do_checks = FALSE) - stopifnot(!inherits(scen.fut.daily, "try-error")) - print(paste0(i, ", site_id = ", site_id, ", scenario_id = ", scenario_id, ", ", tolower(paste(tag, gcm, sep=".")), ", timeslice = ", 
rownames(future_yrs)[it], ": delta-hybrid-3mod replaced by delta method for monthly->daily")) - } - data_blob <- dbW_weatherData_to_blob(scen.fut.daily, compression_type) - years <- as.integer(names(scen.fut.daily)) - types[[length(types)+1]] <- list(Site_id=site_id, Scenario_id=scenario_id, StartYear=years[1], EndYear=years[length(years)], weatherData=data_blob) - } - } - wdataOut[[ir]] <- types - } - saveRDS(wdataOut, file=file.path(dir.out.temp, gcm, paste(GCM_source, "_", i, ".rds", sep=""))) - res <- i + if (print.debug) { + obs.hist.monthly_mean <- aggregate(obs.hist.monthly[, -(1:2)], + list(obs.hist.monthly[, "Month"]), mean) + } - res - }) + #Hamlet et al. 2010: "an arbitrary ceiling of 150% of the observed maximum precipitation value for each cell is also imposed by ???spreading out??? very large daily precipitation values into one or more adjacent days" + dailyPPTceiling <- opt_DS[["daily_ppt_limit"]] * max(unlist(lapply(obs.hist.daily, function(obs) max(obs@data[, "PPT_cm"])))) + #Monthly extremes are used to cut the most extreme spline oscillations; these limits are ad hoc; monthly temperature extremes based on expanded daily extremes + temp <- stretch_values(x = range(sapply(obs.hist.daily, function(obs) obs@data[, c("Tmax_C", "Tmin_C")])), lambda = opt_DS[["monthly_limit"]]) + monthly_extremes <- list(Tmax = temp, Tmin = temp, PPT = c(0, opt_DS[["monthly_limit"]] * max(tapply(obs.hist.monthly[, "PPT_cm"], obs.hist.monthly[, 1], sum)))) + + + wdataOut <- list() + for (ir in seq_along(rcps)) { #Apply downscaling for each RCP + #Put historical data together + #NOTE: both scen.hist.monthly and scen.fut.monthly may have NAs because some GCMs do not provide data for the last month of a time slice (e.g. 
December 2005 may be NA) + scen.hist.monthly <- NULL + if (!all(downs == "raw")) { + for (itt in which(assocYears[["historical"]]$first)) + scen.hist.monthly <- rbind(scen.hist.monthly, scen.monthly[1, itt][[1]]) + + for (itt in which(assocYears[["historical"]]$second)) + scen.hist.monthly <- rbind(scen.hist.monthly, scen.monthly[1 + ir, getYears$n_first + itt][[1]]) + } + + if (print.debug && !is.null(scen.hist.monthly)) { + scen.hist.monthly_mean <- aggregate(scen.hist.monthly[, -(1:2)], + list(scen.hist.monthly[, "month"]), mean, na.rm = TRUE) + + temp <- apply(scen.hist.monthly_mean[, -1] - obs.hist.monthly_mean[, -1], 2, mean) + print(paste0(i, "-th extraction: 'scen hist' - 'obs hist': ", + paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) + } + + types <- list() + for (it in seq_len(nrow(future_yrs))) { + tag <- paste0(rownames(future_yrs)[it], ".", rcps[ir]) + + #Put future data together + scen.fut.monthly <- NULL + for (itt in which(assocYears[[tag]]$first)) + scen.fut.monthly <- rbind(scen.fut.monthly, scen.monthly[1, itt][[1]]) + + for (itt in which(assocYears[[tag]]$second)) + scen.fut.monthly <- rbind(scen.fut.monthly, scen.monthly[1 + ir, getYears$n_first + itt][[1]]) + + if (print.debug) { + scen.fut.monthly_mean <- aggregate(scen.fut.monthly[, -(1:2)], + list(scen.fut.monthly[, "month"]), mean, na.rm = TRUE) + } + + # Comment: The variables are expected to cover the following time periods + # 'obs.hist.daily' = simstartyr:endyr + # 'obs.hist.monthly' = simstartyr:endyr + # 'scen.hist.monthly' = DScur_startyr:DScur_endyr + # 'scen.fut.monthly' = DSfut_startyr:DSfut_endyr + # 'scen.fut.daily' will cover: delta + simstartyr:endyr + # Units are [degree Celsius] for temperature and [cm / day] and [cm / month], respectively, for precipitation + + #Apply downscaling + for (dm in downs) { + if (print.debug) + print(paste0(i, "-th extraction: ", tag, " downscaling with method ", shQuote(dm))) + + temp <- dbW_iScenarioTable[, 
"Scenario"] == tolower(paste(dm, tag, gcm, sep = ".")) + scenario_id <- dbW_iScenarioTable[temp, "id"] + + dm_fun <- switch(dm, raw = downscale.raw, delta = downscale.delta, + `hybrid-delta` = downscale.deltahybrid, + `hybrid-delta-3mod` = downscale.deltahybrid3mod, stop) + + for (do_checks in c(TRUE, FALSE)) { + scen.fut.daily <- try(dm_fun( + obs.hist.daily = obs.hist.daily, obs.hist.monthly = obs.hist.monthly, + scen.hist.monthly = scen.hist.monthly, scen.fut.monthly= scen.fut.monthly, + deltaFuture_yr = future_yrs[it, "delta"], years = sim_years, + DScur_startyear = DScur_startyr, DScur_endyear = DScur_endyr, + DSfut_startyear = future_yrs[it, "DSfut_startyr"], + DSfut_endyear = future_yrs[it, "DSfut_endyr"], + opt_DS = opt_DS, + dailyPPTceiling = dailyPPTceiling, monthly_extremes = monthly_extremes, + do_checks = do_checks)) + + if (!inherits(scen.fut.daily, "try-error")) { + if (!do_checks) + print(paste0(i, "-th extraction: ", tag, ": ", shQuote(dm), + " quality checks turned off")) + break + } + } + + if (inherits(scen.fut.daily, "try-error")) + stop(scen.fut.daily) + + if (print.debug) { + temp <- dbW_weatherData_to_monthly(scen.fut.daily) + scen.fut.down_mean <- aggregate(temp[, -(1:2)], list(temp[, "Month"]), mean) + + temp <- apply(scen.fut.down_mean[, -1] - obs.hist.monthly_mean[, -1], 2, mean) + print(paste0(i, "-th extraction: ", tag, ": ", shQuote(dm), + "'downscaled fut' - 'obs hist': ", + paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) + + temp <- apply(scen.fut.down_mean[, -1] - scen.hist.monthly_mean[, -1], 2, mean) + print(paste0(i, "-th extraction: ", tag, ": ", shQuote(dm), + ": 'downscaled fut' - 'scen hist': ", + paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) + } + + data_blob <- dbW_weatherData_to_blob(scen.fut.daily, compression_type) + years <- as.integer(names(scen.fut.daily)) + + types[[length(types) + 1]] <- list(Site_id = site_id, Scenario_id = scenario_id, + 
StartYear = years[1], EndYear = years[length(years)], weatherData = data_blob) + } + } + + wdataOut[[ir]] <- types + } + + saveRDS(wdataOut, + file = file.path(dir.out.temp, gcm, paste0(clim_source, "_", i, ".rds"))) + res <- i + on.exit() + + res + }) #' Make daily weather for a scenario #' #' A wrapper function for \code{calc.ScenarioWeather} with error control. #' #' @inheritParams calc.ScenarioWeather - try.ScenarioWeather <- compiler::cmpfun(function(i, GCM_source, is_GDODCPUCLLNL, is_NEX, gcmFiles, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, bbox, locations, dbW_iSiteTable, compression_type, varTags, fileVarTags, getYears, assocYears, future_yrs, simstartyr, endyr, DScur_startyr, DScur_endyr, downscaling.options, dir.out.temp, be.quiet, useRCurl, saveNEXtempfiles) { + try.ScenarioWeather <- compiler::cmpfun(function(i, clim_source, is_netCDF, is_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, locations, dbW_iSiteTable, compression_type, getYears, assocYears, future_yrs, simstartyr, endyr, DScur_startyr, DScur_endyr, opt_DS, dir.out.temp, be.quiet, print.debug) { temp <- try(calc.ScenarioWeather(i = i, - GCM_source = GCM_source, is_GDODCPUCLLNL = is_GDODCPUCLLNL, is_NEX = is_NEX, - gcmFiles = gcmFiles, + clim_source = clim_source, is_netCDF = is_netCDF, is_NEX = is_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = reqDownscalingsPerGCM, climate.ambient = climate.ambient, - bbox = bbox, locations = locations, dbW_iSiteTable = dbW_iSiteTable, compression_type = compression_type, - varTags = varTags, fileVarTags = fileVarTags, getYears = getYears, assocYears = assocYears, future_yrs = future_yrs, simstartyr = simstartyr, endyr = endyr, DScur_startyr = DScur_startyr, DScur_endyr = DScur_endyr, - downscaling.options = downscaling.options, + opt_DS = opt_DS, dir.out.temp = dir.out.temp, - be.quiet = be.quiet, - 
useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles), - silent = FALSE) + be.quiet = be.quiet, print.debug = print.debug)) if (inherits(temp, "try-error")) { print(paste(Sys.time(), temp)) - save(i, temp, file = file.path(dir.out.temp, paste0("failed_", i, ".RData"))) + save(i, temp, clim_source, is_netCDF, is_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, + reqDownscalingsPerGCM, climate.ambient, locations, dbW_iSiteTable, + compression_type, getYears, assocYears, future_yrs, + simstartyr, endyr, DScur_startyr, DScur_endyr, opt_DS, + dir.out.temp, be.quiet, + file = file.path(dir.out.temp, paste0("ClimScen_failed_", i, "_l1.RData"))) res <- NULL } else { res <- i @@ -2105,7 +2478,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #' Organizes the calls (in parallel) which obtain specified scenario weather for the weather database from one of the available GCM sources #' #' This function assumes that a whole bunch of global variables exist and contain appropriate values. 
- tryToGet_ClimDB <- compiler::cmpfun(function(is_ToDo, list.export, GCM_source, is_GDODCPUCLLNL, is_NEX, gcmFiles, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, bbox, locations, varTags, fileVarTags, getYears, assocYears, saveNEXtempfiles) { + tryToGet_ClimDB <- compiler::cmpfun(function(is_ToDo, list.export, clim_source, is_netCDF, is_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, locations, getYears, assocYears) { #requests is_ToDo: fastest if nc file is # - DONE: permutated to (lat, lon, time) instead (time, lat, lon) # - TODO: many sites are extracted from one nc-read instead of one site per nc-read (see benchmarking_GDODCPUCLLNL_extractions.R) @@ -2119,30 +2492,23 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U export_objects_to_workers(list.export, list(local = environment(), parent = parent.frame(), global = .GlobalEnv), "mpi") - if (is_NEX && useRCurl && !saveNEXtempfiles) - Rmpi::mpi.bcast.cmd(library("RCurl", quietly=TRUE)) - if (is_GDODCPUCLLNL) - Rmpi::mpi.bcast.cmd(library("ncdf4", quietly=TRUE)) Rmpi::mpi.bcast.cmd(library("Rsoilwat31", quietly=TRUE)) Rmpi::mpi.bcast.cmd(Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile)) i_Done <- Rmpi::mpi.applyLB(x = is_ToDo, fun = try.ScenarioWeather, - GCM_source = GCM_source, is_GDODCPUCLLNL = is_GDODCPUCLLNL, is_NEX = is_NEX, - gcmFiles = gcmFiles, + clim_source = clim_source, is_netCDF = is_netCDF, is_NEX = is_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = reqDownscalingsPerGCM, climate.ambient = climate.ambient, - bbox = bbox, locations = locations, dbW_iSiteTable = dbW_iSiteTable, compression_type = dbW_compression_type, - varTags = varTags, fileVarTags = fileVarTags, getYears = getYears, assocYears = assocYears, future_yrs = future_yrs, simstartyr = simstartyr, endyr = endyr, DScur_startyr = DScur_startyr, DScur_endyr = DScur_endyr, - 
downscaling.options = downscaling.options, + opt_DS = opt_DS, dir.out.temp = dir.out.temp, - be.quiet = be.quiet, - useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles) + be.quiet = be.quiet, print.debug = print.debug) Rmpi::mpi.bcast.cmd(Rsoilwat31::dbW_disconnectConnection()) Rmpi::mpi.bcast.cmd(rm(list=ls())) @@ -2153,30 +2519,23 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U list(local = environment(), parent = parent.frame(), global = .GlobalEnv), "snow", cl) - if (is_NEX && useRCurl && !saveNEXtempfiles) - snow::clusterEvalQ(cl, library("RCurl", quietly = TRUE)) - if (is_GDODCPUCLLNL) - snow::clusterEvalQ(cl, library("ncdf4", quietly=TRUE)) snow::clusterEvalQ(cl, library("Rsoilwat31", quietly=TRUE)) snow::clusterEvalQ(cl, Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile)) i_Done <- snow::clusterApplyLB(cl, x = is_ToDo, fun = try.ScenarioWeather, - GCM_source = GCM_source, is_GDODCPUCLLNL = is_GDODCPUCLLNL, is_NEX = is_NEX, - gcmFiles = gcmFiles, + clim_source = clim_source, is_netCDF = is_netCDF, is_NEX = is_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = reqDownscalingsPerGCM, climate.ambient = climate.ambient, - bbox = bbox, locations = locations, dbW_iSiteTable = dbW_iSiteTable, compression_type = dbW_compression_type, - varTags = varTags, fileVarTags = fileVarTags, getYears = getYears, assocYears = assocYears, future_yrs = future_yrs, simstartyr = simstartyr, endyr = endyr, DScur_startyr = DScur_startyr, DScur_endyr = DScur_endyr, - downscaling.options = downscaling.options, + opt_DS = opt_DS, dir.out.temp = dir.out.temp, - be.quiet = be.quiet, - useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles) + be.quiet = be.quiet, print.debug = print.debug) snow::clusterEvalQ(cl, Rsoilwat31::dbW_disconnectConnection()) snow::clusterEvalQ(cl, rm(list=ls())) @@ -2184,29 +2543,22 @@ if (exinfo$GDODCPUCLLNL || 
exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } else if (identical(parallel_backend, "multicore")) { packages.export <- "Rsoilwat31" - if (is_NEX && useRCurl && !saveNEXtempfiles) - packages.export <- c(packages.export, "RCurl") - if (is_GDODCPUCLLNL) - packages.export <- c(packages.export, "ncdf4") i_Done <- foreach(i=is_ToDo, .combine="c", .errorhandling="remove", .inorder=FALSE, .export=list.export, .packages=packages.export) %dopar% { Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile) temp <- try.ScenarioWeather(i, - GCM_source = GCM_source, is_GDODCPUCLLNL = is_GDODCPUCLLNL, is_NEX = is_NEX, - gcmFiles = gcmFiles, + clim_source = clim_source, is_netCDF = is_netCDF, is_NEX = is_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = reqDownscalingsPerGCM, climate.ambient = climate.ambient, - bbox = bbox, locations = locations, dbW_iSiteTable = dbW_iSiteTable, compression_type = dbW_compression_type, - varTags = varTags, fileVarTags = fileVarTags, getYears = getYears, assocYears = assocYears, future_yrs = future_yrs, simstartyr = simstartyr, endyr = endyr, DScur_startyr = DScur_startyr, DScur_endyr = DScur_endyr, - downscaling.options = downscaling.options, + opt_DS = opt_DS, dir.out.temp = dir.out.temp, - be.quiet = be.quiet, - useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles) + be.quiet = be.quiet, print.debug = print.debug) Rsoilwat31::dbW_disconnectConnection() return(temp) } @@ -2217,29 +2569,27 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U } else { Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile) i_Done <- foreach(i=is_ToDo, .combine="c", .errorhandling="remove", .inorder=FALSE) %do% try.ScenarioWeather(i, - GCM_source = GCM_source, is_GDODCPUCLLNL = is_GDODCPUCLLNL, is_NEX = is_NEX, - gcmFiles = gcmFiles, + clim_source = clim_source, is_netCDF = is_netCDF, is_NEX = is_NEX, + climDB_meta = climDB_meta, 
climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = reqDownscalingsPerGCM, climate.ambient = climate.ambient, - bbox = bbox, locations = locations, dbW_iSiteTable = dbW_iSiteTable, compression_type = dbW_compression_type, - varTags = varTags, fileVarTags = fileVarTags, getYears = getYears, assocYears = assocYears, future_yrs = future_yrs, simstartyr = simstartyr, endyr = endyr, DScur_startyr = DScur_startyr, DScur_endyr = DScur_endyr, - downscaling.options = downscaling.options, + opt_DS = opt_DS, dir.out.temp = dir.out.temp, - be.quiet = be.quiet, - useRCurl = useRCurl, saveNEXtempfiles = saveNEXtempfiles) + be.quiet = be.quiet, print.debug = print.debug) Rsoilwat31::dbW_disconnectConnection() } - if (!be.quiet) print(paste("Started adding temporary files into database '", GCM_source, "' at", Sys.time())) + + if (!be.quiet) print(paste("Started adding temporary files into database '", clim_source, "' at", Sys.time())) Rsoilwat31::dbW_setConnection(dbFilePath=dbWeatherDataFile) - temp.files <- list.files(path=dir.out.temp, pattern=GCM_source, recursive=TRUE, include.dirs=FALSE, no..=TRUE) + temp.files <- list.files(path=dir.out.temp, pattern=clim_source, recursive=TRUE, include.dirs=FALSE, no..=TRUE) if (length(temp.files) > 0) { for (k in seq_along(temp.files)) { ftemp <- file.path(dir.out.temp, temp.files[k]) @@ -2252,10 +2602,12 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U Scenario_id = wdataOut[[j]][[l]]$Scenario_id, StartYear = wdataOut[[j]][[l]]$StartYear, EndYear = wdataOut[[j]][[l]]$EndYear, - weather_blob = wdataOut[[j]][[l]]$weatherData), - silent=TRUE) + weather_blob = wdataOut[[j]][[l]]$weatherData)) if (inherits(res, "try-error")) { - if (!be.quiet) print(paste("Adding downscaled data for Site_id", wdataOut[[j]][[l]]$Site_id, "scenario", wdataOut[[j]][[l]]$Scenario_id, "was unsuccessful:", temp)) + if (!be.quiet) + print(paste("Adding downscaled data for Site_id", 
+ wdataOut[[j]][[l]]$Site_id, "scenario", + wdataOut[[j]][[l]]$Scenario_id, "was unsuccessful:", temp)) break } } @@ -2275,77 +2627,28 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #--------------------------------------------------------------------------------------------------# #------EXTRACT CLIMATE CHANGE DATA------ -# Allow for multiple data sources -# - among sites but not multiple sources per site (for that you need a new row in the MasterInput spreadsheet) -# - NEX USA data has highest priority, then LLNL USA products, then global LLNL products - -if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { - - xy <- with(SWRunInformation[runIDs_sites,], data.frame(X_WGS84, Y_WGS84)) - i_use <- rep(FALSE, times = runsN_sites) - - if (extract_determine_database == "SWRunInformation" && "GCM_sources" %in% colnames(SWRunInformation)) { - sites_GCM_source <- SWRunInformation$GCM_sources[runIDs_sites] - - } else if (extract_determine_database == "order" || !("GCM_sources" %in% colnames(SWRunInformation))) { - sites_GCM_source <- rep(NA, times = runsN_sites) - # determine which data product to use for each site based on bounding boxes of datasets - if (exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { - i_use <- in_GMC_box(xy = xy, lats=c(24.0625, 49.9375), longs=c(-125.02083333, -66.47916667), i_use=i_use) - sites_GCM_source[i_use] <- "CMIP5_BCSD_NEX_USA" - } - if (exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA) { - i_use <- in_GMC_box(xy = xy, lats=c(25.125, 52.875), longs=c(-124.625, -67), i_use=i_use) - sites_GCM_source[i_use] <- "CMIP3_BCSD_GDODCPUCLLNL_USA" - } - if (exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA) { - i_use <- in_GMC_box(xy = xy, lats=c(25.125, 52.875), longs=c(-124.625, -67), i_use=i_use) - sites_GCM_source[i_use] <- "CMIP5_BCSD_GDODCPUCLLNL_USA" - } - if (exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global) { - 
i_use <- in_GMC_box(xy = xy, lats=c(-55.25-0.25, 83.25+0.25), longs=c(-179.75-0.25, 179.75+0.25), i_use=i_use) - sites_GCM_source[i_use] <- "CMIP3_BCSD_GDODCPUCLLNL_Global" - } - if (exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global) { - i_use <- in_GMC_box(xy = xy, lats=c(-55.25-0.25, 83.25+0.25), longs=c(-179.75-0.25, 179.75+0.25), i_use=i_use) - sites_GCM_source[i_use] <- "CMIP5_BCSD_GDODCPUCLLNL_Global" - } - - #write data to datafile.SWRunInformation - SWRunInformation$GCM_sources[runIDs_sites] <- as.character(sites_GCM_source) - write.csv(SWRunInformation, file=file.path(dir.in, datafile.SWRunInformation), row.names=FALSE) - unlink(file.path(dir.in, datafile.SWRWinputs_preprocessed)) - - } else { - stop(paste("Value of 'extract_determine_database'", extract_determine_database, "not implemented")) - } - - if (anyNA(sites_GCM_source)) print("No climate change data available for ", sum(is.na(sites_GCM_source)), " sites") +if (exinfo$ExtractClimateChangeScenarios && + any(exinfo$which_NEX) || any(exinfo$which_netCDF)) { #access climate change data - get_climatechange_data <- compiler::cmpfun(function(GCM_source, is_GDODCPUCLLNL, is_NEX, do_SWRun_sites, include_YN_climscen) { - # GCM_source <- if (GCM_source == "CMIP5_BCSD_NEX_USA") { - # "CMIP5-BCSD-NEX-USA" - # } else if (grepl("CMIP3_BCSD_GDODCPUCLLNL", GCM_source)) { - # "CMIP3-BCSD-GDO-DCP-UC-LLNL" - # } else if (grepl("CMIP5_BCSD_GDODCPUCLLNL", GCM_source)) { - # "CMIP5-BCSD-GDO-DCP-UC-LLNL" - # } else { - # stop("No dataset for climate change conditions set") - # } - if (!be.quiet) print(paste0("Started '", GCM_source, "' at ", Sys.time())) + get_climatechange_data <- compiler::cmpfun(function(clim_source, is_netCDF, is_NEX, do_SWRun_sites, include_YN_climscen, climDB_meta) { + if (!be.quiet) print(paste0("Started", shQuote(clim_source), "at ", Sys.time())) #Global flags repeatExtractionLoops_maxN <- 3 + temp <- strsplit(clim_source, split = "_", fixed = TRUE)[[1]] + dir.ex.dat <- 
file.path(dir.ex.fut, "ClimateScenarios", + temp[1], paste(temp[-1], collapse = "_")) + #Specific flags - if (is_GDODCPUCLLNL) { + if (is_netCDF) { ##gdo-dcp.ucllnl.org/downscaled_cmip_projections - dir.ex.dat <- file.path(dir.ex.fut, "GDO_DCP_UCLLNL_DownscaledClimateData") - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_USA") dir.ex.dat <- file.path(dir.ex.dat, "CMIP3_BCSD", "CONUS_0.125degree") - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_Global") dir.ex.dat <- file.path(dir.ex.dat, "CMIP3_BCSD", "Global_0.5degree_MaurerEd") - if (GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_USA") dir.ex.dat <- file.path(dir.ex.dat, "CMIP5_BCSD", "CONUS_0.125degree_r1i1p1") - if (GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_Global") dir.ex.dat <- file.path(dir.ex.dat, "CMIP5_BCSD", "Global_0.5degree_r1i1p1") +# dir.ex.dat <- file.path(dir.ex.fut, "GDO_DCP_UCLLNL_DownscaledClimateData") +# if (clim_source == "CMIP3_BCSD_GDODCPUCLLNL_USA") dir.ex.dat <- file.path(dir.ex.dat, "CMIP3_BCSD", "CONUS_0.125degree") +# if (clim_source == "CMIP3_BCSD_GDODCPUCLLNL_Global") dir.ex.dat <- file.path(dir.ex.dat, "CMIP3_BCSD", "Global_0.5degree_MaurerEd") +# if (clim_source == "CMIP5_BCSD_GDODCPUCLLNL_USA") dir.ex.dat <- file.path(dir.ex.dat, "CMIP5_BCSD", "CONUS_0.125degree_r1i1p1") +# if (clim_source == "CMIP5_BCSD_GDODCPUCLLNL_Global") dir.ex.dat <- file.path(dir.ex.dat, "CMIP5_BCSD", "Global_0.5degree_r1i1p1") #CMIP3 Global and USA # - obs: 1950 Jan to 1999 Dec @@ -2362,95 +2665,91 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # => ignore missing Dec value; ignore 2005 Dec value if that is the start # - all same spatial coordinates - scenariosDB <- list.dirs(dir.ex.dat, full.names=FALSE, recursive=FALSE) - if (any((temp <- sapply(scenariosDB, FUN=function(x) length(list.files(file.path(dir.ex.dat, x))))) == 0)) scenariosDB <- scenariosDB[temp > 0] + # get netCDF files + temp <- list.files(dir.ex.dat, full.names = TRUE, recursive = TRUE) + ext <- 
sapply(strsplit(basename(temp), split = ".", fixed = TRUE), function(x) x[length(x)]) + climDB_files <- temp[tolower(ext) %in% c("nc", "nc4", "ncdf", "netcdf")] + if (length(climDB_files) == 0) + stop("Could find no files for ", shQuote(clim_source), " in ", dir.ex.dat) + + climDB_fname_meta <- strsplit(basename(climDB_files), + split = climDB_meta[["sep_fname"]], fixed = TRUE) + stopifnot(diff(lengths(climDB_fname_meta)) == 0L) - gcmsDB <- unique(unlist(sapply(scenariosDB, FUN=function(x) sapply(strsplit(list.files(file.path(dir.ex.dat, x)), split="_", fixed=TRUE), FUN=function(x) x[5])))) - gcmFiles <- list.files(dir.ex.dat, recursive=TRUE, full.names=TRUE) + temp <- matrix(unlist(climDB_fname_meta), ncol = length(climDB_fname_meta)) + climDB_struct <- lapply(climDB_meta[["str_fname"]], function(id) unique(temp[id, ])) print_int <- 100 - saveNEXtempfiles <- FALSE } if (is_NEX) { ##https://portal.nccs.nasa.gov/portal_home/published/NEX.html opt <- options("timeout") options(timeout=5*60) - dir.ex.dat <- NULL - if (useRCurl) { - stopifnot(RCurl::url.exists("https://portal.nccs.nasa.gov/portal_home/published/NEX.html")) #check whether we are online - saveNEXtempfiles <- FALSE + if (requireNamespace("RCurl")) { + if (!RCurl::url.exists("https://portal.nccs.nasa.gov/portal_home/published/NEX.html")) { + # check whether we are online + stop("We and/or the 'NEX' server are offline") + } } else { - saveNEXtempfiles <- TRUE - } - - gcmsDB <- c("inmcm4", "bcc-csm1-1", "bcc-csm1-1-m", "NorESM1-M", "MRI-CGCM3", "MPI-ESM-MR", "MPI-ESM-LR", "MIROC5", "MIROC-ESM", "MIROC-ESM-CHEM", "IPSL-CM5B-LR", "IPSL-CM5A-MR", "IPSL-CM5A-LR", "HadGEM2-ES", "HadGEM2-CC", "HadGEM2-AO", "GISS-E2-R", "GFDL-ESM2M", "GFDL-ESM2G", "GFDL-CM3", "FIO-ESM", "FGOALS-g2", "CanESM2", "CSIRO-Mk3-6-0", "CNRM-CM5", "CMCC-CM", "CESM1-CAM5", "CESM1-BGC", "CCSM4", "BNU-ESM", "ACCESS1-0") - scenariosDB <- c("historical", "rcp26", "rcp45", "rcp60", "rcp85") - gcmFiles <- NULL + print("We assume that we and the 
'NEX' server are online.") + } + + climDB_struct <- list( + id_var = NULL, + id_gcm = c("inmcm4", "bcc-csm1-1", "bcc-csm1-1-m", "NorESM1-M", "MRI-CGCM3", + "MPI-ESM-MR", "MPI-ESM-LR", "MIROC5", "MIROC-ESM", "MIROC-ESM-CHEM", + "IPSL-CM5B-LR", "IPSL-CM5A-MR", "IPSL-CM5A-LR", "HadGEM2-ES", + "HadGEM2-CC", "HadGEM2-AO", "GISS-E2-R", "GFDL-ESM2M", "GFDL-ESM2G", + "GFDL-CM3", "FIO-ESM", "FGOALS-g2", "CanESM2", "CSIRO-Mk3-6-0", + "CNRM-CM5", "CMCC-CM", "CESM1-CAM5", "CESM1-BGC", "CCSM4", "BNU-ESM", + "ACCESS1-0"), + id_scen = c("historical", "rcp26", "rcp45", "rcp60", "rcp85"), + id_run = NULL, + id_time = NULL + ) + climDB_files <- NULL + print_int <- 1 } - #Force dataset specific lower/uper case for GCMs and RCPs - reqGCMs <- gcmsDB[match(tolower(reqGCMs), tolower(gcmsDB), nomatch=NA)] - reqRCPs <- scenariosDB[match(tolower(reqRCPs), tolower(scenariosDB), nomatch=NA)] - reqRCPsPerGCM <- lapply(reqRCPsPerGCM, FUN=function(r) scenariosDB[match(tolower(r), tolower(scenariosDB), nomatch=NA)]) + #Force dataset specific lower/upper case for GCMs and RCPs, i.e., use values from climDB_struct and not reqGCMs and reqRCPs + reqGCMs <- as.character(climDB_struct[["id_gcm"]][match(tolower(reqGCMs), tolower(climDB_struct[["id_gcm"]]), nomatch = NA)]) + reqRCPs <- as.character(climDB_struct[["id_scen"]][match(tolower(reqRCPs), tolower(climDB_struct[["id_scen"]]), nomatch = NA)]) + reqRCPsPerGCM <- lapply(reqRCPsPerGCM, function(r) + as.character(climDB_struct[["id_scen"]][match(tolower(r), tolower(climDB_struct[["id_scen"]]), nomatch = NA)])) - #Tests that all requested conditions will be extracted - stopifnot(length(reqGCMs) > 0, all(!is.na(reqGCMs))) - stopifnot(length(reqRCPs) > 0, all(!is.na(reqRCPs)), any(grepl("historical", scenariosDB, ignore.case=TRUE))) + #Tests that all requested conditions will be extracted + stopifnot(length(reqGCMs) > 0, all(!is.na(reqGCMs))) + stopifnot(length(reqRCPs) > 0, all(!is.na(reqRCPs)), + any(grepl("historic", climDB_struct[["id_scen"]], 
ignore.case = TRUE))) #put requests together locations <- SWRunInformation[do_SWRun_sites, c("X_WGS84", "Y_WGS84", "site_id", "WeatherFolder")] #locations of simulation runs requestN <- length(reqGCMs) * nrow(locations) - if (!be.quiet) print(paste("'", GCM_source, "' will run", requestN, "times")) + if (!be.quiet) print(paste(shQuote(clim_source), "will run", requestN, "times")) - #bounding box - bbox <- data.frame(matrix(NA, nrow=2, ncol=2, dimnames=list(NULL, c("lat", "lon")))) - if (is_NEX) { - bbox$lat <- c(24.0625, 49.9375) - bbox$lon <- c(-125.02083333, -66.47916667) - } - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_USA" || GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_USA") { - bbox$lat <- c(25.125, 52.875) - bbox$lon <- c(-124.625, -67) - } - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_Global" || GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_Global") { - bbox$lat <- c(-55.25-0.25, 83.25+0.25) - bbox$lon <- c(-179.75-0.25, 179.75+0.25) - } - - #time box - tbox <- data.frame(matrix(NA, nrow=2, ncol=2, dimnames=list(c("start", "end"), c("first", "second")))) - if (is_NEX || GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_USA" || GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_Global") { - tbox$first <- c(1950, 2005) - tbox$second <- c(2006, 2099) - } - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_USA" || GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_Global") { - tbox$first <- c(1950, 1999) - tbox$second <- c(2000, 2099) - } #timing: time slices: data is organized into 'historical' runs 1950-2005 (="first") and future 'rcp' runs 2006-2099 (="second") timeSlices <- data.frame(matrix(NA, ncol=4, nrow = 4 + 4 * nrow(future_yrs), dimnames = list(NULL, c("Run", "Slice", "Time", "Year")))) timeSlices[, 1:3] <- expand.grid(c("start", "end"), c("first", "second"), c("historical", rownames(future_yrs)))[, 3:1] #historic conditions for downscaling - timeSlices[1, 4] <- max(tbox$first[1], DScur_startyr) - timeSlices[2, 4] <- min(tbox$first[2], DScur_endyr) - if (DScur_endyr > tbox$first[2]) { - timeSlices[3, 4] 
<- tbox$second[1] - timeSlices[4, 4] <- min(tbox$second[2], DScur_endyr) + timeSlices[1, 4] <- max(climDB_meta[["tbox"]]["start", "first"], DScur_startyr) + timeSlices[2, 4] <- min(climDB_meta[["tbox"]]["end", "first"], DScur_endyr) + if (DScur_endyr > climDB_meta[["tbox"]]["end", "first"]) { + timeSlices[3, 4] <- climDB_meta[["tbox"]]["start", "second"] + timeSlices[4, 4] <- min(climDB_meta[["tbox"]]["end", "second"], DScur_endyr) } #future conditions for downscaling for (it in 1:nrow(future_yrs)) { - timeSlices[3 + 4*it, 4] <- max(tbox$second[1], future_yrs[it, "DSfut_startyr"]) - timeSlices[4 + 4*it, 4] <- min(tbox$second[2], future_yrs[it, "DSfut_endyr"]) #limits timeSlices to 2099 + timeSlices[3 + 4*it, 4] <- max(climDB_meta[["tbox"]]["start", "second"], future_yrs[it, "DSfut_startyr"]) + timeSlices[4 + 4*it, 4] <- min(climDB_meta[["tbox"]]["end", "second"], future_yrs[it, "DSfut_endyr"]) #limits timeSlices to 2099 if (DScur_startyr < 1950) { #TODO(drs): I don't know where the hard coded value of 1950 comes from; it doesn't make sense to me print("Note: adjustment to 'timeSlices' because 'DScur_startyr < 1950'") timeSlices[4 + 4*it, 4] <- min(timeSlices[4 + 4*it, 4], timeSlices[4 + 3*it, 4]+(timeSlices[4,4]-timeSlices[1,4])) } - if (future_yrs[it, "DSfut_startyr"] < tbox$second[1]) { - timeSlices[1 + 4*it, 4] <- max(tbox$first[1], future_yrs[it, "DSfut_startyr"]) - timeSlices[2 + 4*it, 4] <- tbox$second[1] + if (future_yrs[it, "DSfut_startyr"] < climDB_meta[["tbox"]]["start", "second"]) { + timeSlices[1 + 4*it, 4] <- max(climDB_meta[["tbox"]]["start", "first"], future_yrs[it, "DSfut_startyr"]) + timeSlices[2 + 4*it, 4] <- climDB_meta[["tbox"]]["start", "second"] } } #get unique time slices @@ -2459,11 +2758,20 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U getYears <- list(n_first = nrow(temp1), first = temp1, n_second = nrow(temp2), second = temp2) #Monthly time-series - getYears$first_dates <- lapply(1:getYears$n_first, 
FUN=function(it) as.POSIXlt(seq(from=ISOdate(getYears$first[it, 1], 1, 1, tz = "UTC"), to=ISOdate(getYears$first[it, 2], 12, 31, tz = "UTC"), by="1 month"))) - getYears$second_dates <- lapply(1:getYears$n_second, FUN=function(it) as.POSIXlt(seq(from=ISOdate(getYears$second[it, 1], 1, 1, tz = "UTC"), to=ISOdate(getYears$second[it, 2], 12, 31, tz = "UTC")), by="1 month"))) - #Days per month - getYears$first_dpm <- lapply(1:getYears$n_first, FUN=function(it) rle(as.POSIXlt(seq(from=ISOdate(getYears$first[it, 1], 1, 1, tz = "UTC"), to=ISOdate(getYears$first[it, 2], 12, 31, tz = "UTC")), by="1 day"))$mon)$lengths) - getYears$second_dpm <- lapply(1:getYears$n_second, FUN=function(it) rle(as.POSIXlt(seq(from=ISOdate(getYears$second[it, 1], 1, 1, tz = "UTC"), to=ISOdate(getYears$second[it, 2], 12, 31, tz = "UTC")), by="1 day"))$mon)$lengths) + temp1 <- list(ISOdate(getYears$first[, 1], 1, 1, tz = "UTC"), + ISOdate(getYears$first[, 2], 12, 31, tz = "UTC")) + temp2 <- list(ISOdate(getYears$second[, 1], 1, 1, tz = "UTC"), + ISOdate(getYears$second[, 2], 12, 31, tz = "UTC")) + + getYears$first_dates <- lapply(seq_len(getYears$n_first), function(it) + as.POSIXlt(seq(from = temp1[[1]][it], to = temp1[[2]][it], by = "1 month"))) + getYears$second_dates <- lapply(seq_len(getYears$n_second), function(it) + as.POSIXlt(seq(from = temp2[[1]][it], to = temp2[[2]][it], by = "1 month"))) + #Days per month + getYears$first_dpm <- lapply(seq_len(getYears$n_first), function(it) + rle(as.POSIXlt(seq(from = temp1[[1]][it], to = temp1[[2]][it], by = "1 day"))$mon)$lengths) + getYears$second_dpm <- lapply(seq_len(getYears$n_second), function(it) + rle(as.POSIXlt(seq(from = temp2[[1]][it], to = temp2[[2]][it], by = "1 day"))$mon)$lengths) #Logical on how to select from getYears assocYears <- vector("list", length = 1 + length(reqRCPs) * nrow(future_yrs)) @@ -2476,61 +2784,53 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U names(assocYears) <- names_assocYears 
print(paste("Future scenario data will be extracted for a time period spanning ", timeSlices[7,4], "through", max(na.omit(timeSlices[,4])))) - #Variable tags - if (is_NEX) { - varTags <- c(prcp = "pr", tmin = "tasmin", tmax = "tasmax") #units c("kg/m2/s", "K", "K") --> SoilWat required units c("cm/day", "C", "C") - fileVarTags <- NULL - } - if (GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_USA" || GCM_source == "CMIP3_BCSD_GDODCPUCLLNL_Global") { - varTags <- c(prcp = "Prcp", tmean = "Tavg", tmin = "Tmin", tmax = "Tmax") - fileVarTags <- paste("monthly", varTags, sep=".") - names(fileVarTags) <- names(varTags) - } - if (GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_USA" || GCM_source == "CMIP5_BCSD_GDODCPUCLLNL_Global") { - varTags <- c(prcp = "pr", tmean = "tas", tmin = "tasmin", tmax = "tasmax") - fileVarTags <- paste0("_", varTags, "_") - names(fileVarTags) <- names(varTags) - } - #objects that need exporting to workers - list.export <- c("dir.out.temp", "GCM_source", "is_GDODCPUCLLNL", "is_NEX", "reqGCMs", "reqRCPsPerGCM", "reqDownscalingsPerGCM", "locations", "climScen", "varTags", "be.quiet", "getYears", "assocYears", "future_yrs", "DScur_startyr", "DScur_endyr", "simstartyr", "endyr", "dbWeatherDataFile", "climate.ambient", "dbW_iSiteTable", "dbW_iScenarioTable", "dbW_compression_type", "bbox", "print_int", - "calc.ScenarioWeather", "get_GCMdata", "get.DBvariable", - "downscale.raw", "downscale.delta", "downscale.deltahybrid", "downscale.deltahybrid3mod", "downscale.periods", - "in_GMC_box", "unique_times", "useSlices", "erf", "add_delta_to_PPT", "fix_PPTdata_length", "calc_Days_withLoweredPPT", "controlExtremePPTevents", "test_sigmaNormal", "test_sigmaGamma", "stretch_values", - "applyDeltas", "applyPPTdelta_simple", "applyPPTdelta_detailed", "applyDelta_oneYear", "applyDeltas2", - "doQmapQUANT.default_drs", "tol", "doQmapQUANT_drs", "applyDeltas2", "downscaling.options") + list.export <- c("add_delta_to_PPT", "applyDelta_oneYear", "applyDeltas", "applyDeltas2", + 
"applyDeltas2", "applyPPTdelta_detailed", "applyPPTdelta_simple", + "be.quiet", "calc_Days_withLoweredPPT", + "calc.ScenarioWeather", "climate.ambient", "climScen", + "controlExtremePPTevents", "dbW_compression_type", "dbW_iScenarioTable", + "dbW_iSiteTable", "dbWeatherDataFile", "dir.out.temp", "doQmapQUANT_drs", + "doQmapQUANT.default_drs", "downscale.delta", "downscale.deltahybrid", + "downscale.deltahybrid3mod", "downscale.periods", "downscale.raw", + "opt_DS", "DScur_endyr", "DScur_startyr", "endyr", + "erf", "fix_PPTdata_length", "future_yrs", "get_GCMdata", "get.DBvariable", + "getYears", "convert_precipitation", "convert_temperature", + "print.debug", "print_int", "simstartyr", "stretch_values", "test_sigmaGamma", + "test_sigmaNormal", "tol", "unique_times", "useSlices") if (is_NEX) { - list.export <- c(list.export, "saveNEXtempfiles", "useRCurl", - "mmPerSecond_to_cmPerMonth", "get.request") + list.export <- c(list.export, "get.request") } - if (is_GDODCPUCLLNL) { - list.export <- c(list.export, "fileVarTags", "gcmFiles", - "mmPerDay_to_cmPerMonth", "whereNearest", "get.TimeIndices", "get.SpatialIndices", "do_ncvar_get") + if (is_netCDF) { + list.export <- c(list.export, "whereNearest", "get.TimeIndices", + "get.SpatialIndices", "do_ncvar_get") } - #Repeat call to get climate data for all requests until complete repeatN <- 0 i_AllToDo <- seq_len(requestN) i_Done <- NULL - logFile <- file.path(dir.out, paste0("extractionsDone_", GCM_source, ".rds")) + logFile <- file.path(dir.out, paste0("extractionsDone_", clim_source, ".rds")) if (file.exists(logFile)) { i_Done <- sort(unique(c(i_Done, readRDS(file=logFile)))) } - temp.files <- list.files(path=dir.out.temp, pattern=GCM_source, recursive=TRUE, include.dirs=FALSE, no..=TRUE) + temp.files <- list.files(path=dir.out.temp, pattern=clim_source, recursive=TRUE, include.dirs=FALSE, no..=TRUE) if (length(temp.files) > 0) { - i_Done <- sort(unique(c(i_Done, as.integer(unlist(strsplit(unlist(strsplit(temp.files, 
split="_", fixed=TRUE))[c(FALSE, TRUE)], split=".", fixed=TRUE))[c(TRUE, FALSE)])))) + # extract i_done number from file name + temp <- lapply(strsplit(temp.files, split = "_", fixed = TRUE), function(x) x[length(x)]) + temp <- lapply(strsplit(unlist(temp), split = ".", fixed = TRUE), function(x) x[1]) + i_Done <- sort(unique(c(i_Done, as.integer(unlist(temp))))) } while (repeatExtractionLoops_maxN > repeatN && length(i_ToDo <- if (length(i_Done) > 0) i_AllToDo[-i_Done] else i_AllToDo) > 0) { repeatN <- repeatN + 1 - if (!be.quiet) print(paste("'", GCM_source, "' will run the", repeatN, ". time to extract", length(i_ToDo), "requests" )) + if (!be.quiet) print(paste(shQuote(clim_source), "will run the", repeatN, ". time to extract", length(i_ToDo), "requests" )) - out <- tryToGet_ClimDB(is_ToDo = i_ToDo, - list.export = list.export, - GCM_source, is_GDODCPUCLLNL, is_NEX, gcmFiles, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, bbox, locations, varTags, fileVarTags, getYears, assocYears, saveNEXtempfiles) + out <- tryToGet_ClimDB(is_ToDo = i_ToDo, list.export = list.export, + clim_source, is_netCDF, is_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, + reqDownscalingsPerGCM, locations, getYears, assocYears) i_Done <- sort(unique(c(i_Done, out))) saveRDS(i_Done, file = logFile) @@ -2539,7 +2839,7 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # Determine progress if (length(i_Done) > 0) { - if (!be.quiet) print(paste(GCM_source, "was extracted for n =", length(i_Done), "out of", length(i_AllToDo), "downscaling requests")) + if (!be.quiet) print(paste(clim_source, "was extracted for n =", length(i_Done), "out of", length(i_AllToDo), "downscaling requests")) ils_done <- unique((i_Done - 1) %/% length(reqGCMs) + 1) include_YN_climscen[do_SWRun_sites][ils_done] <- 1 @@ -2551,20 +2851,20 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U #Clean up: report unfinished locations, etc. 
if (length(i_ToDo) > 0) { - print(paste(length(i_ToDo), "sites didn't extract climate scenario information by '", GCM_source, "'")) + print(paste(length(i_ToDo), "sites didn't extract climate scenario information by '", clim_source, "'")) ils_notdone <- unique((i_ToDo - 1) %/% length(reqGCMs) + 1) failedLocations_DB <- locations[ils_notdone, ] include_YN_updateFailed <- include_YN include_YN_updateFailed[do_SWRun_sites][ils_notdone] <- 0 - save(failedLocations_DB, include_YN_updateFailed, file=file.path(dir.in, paste0("ClimDB_failedLocations_", GCM_source, ".RData"))) + save(failedLocations_DB, include_YN_updateFailed, file=file.path(dir.in, paste0("ClimDB_failedLocations_", clim_source, ".RData"))) rm(failedLocations_DB, include_YN_updateFailed, ils_notdone) } - if (!be.quiet) print(paste("Finished '", GCM_source, "' at", Sys.time())) + if (!be.quiet) print(paste("Finished '", clim_source, "' at", Sys.time())) - rm(locations, requestN, i_Done, i_ToDo, i_AllToDo, varTags, timeSlices, getYears, assocYears, gcmsDB, scenariosDB) + rm(locations, requestN, i_Done, i_ToDo, i_AllToDo, timeSlices, getYears, assocYears, climDB_struct) include_YN_climscen }) @@ -2572,40 +2872,45 @@ if (exinfo$GDODCPUCLLNL || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_U # keep track of successful/unsuccessful climate scenarios include_YN_climscen <- rep(0, runsN_master) - # loop through data sources - for (isource in sort(unique(sites_GCM_source))) { - include_YN_climscen <- get_climatechange_data(GCM_source = isource, - is_GDODCPUCLLNL = grepl("BCSD_GDODCPUCLLNL", isource), - is_NEX = isource == "CMIP5_BCSD_NEX_USA", - do_SWRun_sites = runIDs_sites[sites_GCM_source == isource], - include_YN_climscen = include_YN_climscen) - } + # loop through data sources + for (clim_source in opt_climsc_extr) { + do_SWRun_sites <- runIDs_sites[sites_GCM_source == clim_source] + + if (length(do_SWRun_sites) > 0) + include_YN_climscen <- get_climatechange_data(clim_source = clim_source, + is_netCDF = 
grepl("(BCSD_GDODCPUCLLNL)|(SageSeer)", clim_source), + is_NEX = grepl("NEX", clim_source), + do_SWRun_sites = do_SWRun_sites, + include_YN_climscen = include_YN_climscen, + climDB_meta = climDB_metas[[clim_source]]) + } SWRunInformation$Include_YN_ClimateScenarioSources <- include_YN_climscen write.csv(SWRunInformation, file=file.path(dir.in, datafile.SWRunInformation), row.names=FALSE) unlink(file.path(dir.in, datafile.SWRWinputs_preprocessed)) - rm(sites_GCM_source, xy, i_use, include_YN_climscen) + rm(sites_GCM_source, xy, include_YN_climscen, do_SWRun_sites, clim_source) } #-CMIP3_ClimateWizardEnsembles does not allow for multiple sources -if ( exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global || exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA) { +if (exinfo$ExtractClimateChangeScenarios && any(exinfo$which_ClimateWizard)) { + stopifnot(require("raster")) if (!be.quiet) print(paste("Started 'ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles' at", Sys.time())) list.scenarios.datafile <- climate.conditions[!grepl(climate.ambient, climate.conditions)] if (length(list.scenarios.datafile) > 0) { #extracts only information requested in the 'datafile.SWRunInformation' - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global) { + if (any("CMIP3_ClimateWizardEnsembles_Global" == opt_climsc_extr)) { #Maurer EP, Adam JC, Wood AW (2009) Climate model based consensus on the hydrologic impacts of climate change to the Rio Lempa basin of Central America. Hydrology and Earth System Sciences, 13, 183-194. 
#accessed via climatewizard.org on July 10, 2012 - dir.ex.dat <- file.path(dir.external, "ExtractClimateChangeScenarios", "ClimateWizard_CMIP3", "Global") + dir.ex.dat <- file.path(dir.ex.fut, "ClimateScenarios", "ClimateWizardEnsembles_Global") } - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA) { + if (any("CMIP3_ClimateWizardEnsembles_USA" == opt_climsc_extr)) { #Maurer, E. P., L. Brekke, T. Pruitt, and P. B. Duffy. 2007. Fine-resolution climate projections enhance regional climate change impact studies. Eos Transactions AGU 88:504. #accessed via climatewizard.org - dir.ex.dat <- file.path(dir.external, "ExtractClimateChangeScenarios", "ClimateWizard_CMIP3", "USA") + dir.ex.dat <- file.path(dir.ex.fut, "ClimateScenarios", "ClimateWizardEnsembles_USA") } list.scenarios.external <- basename(list.dirs2(path=dir.ex.dat, full.names=FALSE, recursive=FALSE)) @@ -2617,11 +2922,11 @@ if ( exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global || for (sc in seq_along(list.scenarios.datafile)) { dir.ex.dat.sc <- file.path(dir.ex.dat, list.scenarios.datafile[sc]) temp <- basename(list.dirs2(path=dir.ex.dat.sc, full.names=FALSE, recursive=FALSE)) - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global) { + if ("CMIP3_ClimateWizardEnsembles_Global" == opt_climsc_extr) { dir.ex.dat.sc.ppt <- file.path(dir.ex.dat.sc, temp[grepl(pattern="Precipitation_Value", x=temp)]) dir.ex.dat.sc.temp <- file.path(dir.ex.dat.sc, temp[grepl(pattern="Tmean_Value", x=temp)]) } - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA) { + if ("CMIP3_ClimateWizardEnsembles_USA" == opt_climsc_extr) { dir.ex.dat.sc.ppt <- file.path(dir.ex.dat.sc, temp[grepl(pattern="Precipitation_Change", x=temp)]) dir.ex.dat.sc.temp <- file.path(dir.ex.dat.sc, temp[grepl(pattern="Tmean_Change", x=temp)]) } @@ -2637,7 +2942,7 @@ if ( exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global || sc.temp <- 
sapply(st_mo, FUN=function(m) get.month(path=dir.ex.dat.sc.temp, grid=list.temp.asc[grepl(pattern=paste("_", m, "_", sep=""), x=list.temp.asc)])) sc.ppt <- sapply(st_mo, FUN=function(m) get.month(path=dir.ex.dat.sc.ppt, grid=list.ppt.asc[grepl(pattern=paste("_", m, "_", sep=""), x=list.temp.asc)])) - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global) { + if ("CMIP3_ClimateWizardEnsembles_Global" == opt_climsc_extr) { #temp value in C #ppt value in mm #add data to sw_input_climscen and set the use flags @@ -2648,7 +2953,7 @@ if ( exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global || sw_input_climscen_values_use[i.temp] <- TRUE sw_input_climscen_values[, i.temp] <- sc.temp } - if (exinfo$ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA) { + if ("CMIP3_ClimateWizardEnsembles_USA" == opt_climsc_extr) { sc.temp <- sc.temp * 5/9 #temp addand in C sc.ppt <- 1 + sc.ppt/100 #ppt change as factor #add data to sw_input_climscen and set the use flags @@ -2703,6 +3008,7 @@ if (exinfo$ExtractSoilDataFromCONUSSOILFromSTATSGO_USA || exinfo$ExtractSoilData #Miller, D. A. and R. A. White. 1998. A conterminous United States multilayer soil characteristics dataset for regional climate and hydrology modeling. Earth Interactions 2:1-26. 
#CONUS-SOIL: rasterized and controlled STATSGO data; information for 11 soil layers available # Note(drs): it appears that NODATA values are recorded as 0 + stopifnot(require("raster")) do_extract[[1]] <- is.na(sites_externalsoils_source) | sites_externalsoils_source == "CONUSSOILFromSTATSGO_USA" @@ -2960,7 +3266,7 @@ if (exinfo$ExtractSoilDataFromCONUSSOILFromSTATSGO_USA || exinfo$ExtractSoilData coord = sp::coordinates(sp_sites[i, ]), to_res = if (is.null(dim(res))) res else res[i, ], with_weights = TRUE, - method = "block"), silent = TRUE) + method = "block")) if (inherits(out, "try-error")) { if (print.debug) print(out) @@ -3261,6 +3567,8 @@ if (exinfo$ExtractSoilDataFromCONUSSOILFromSTATSGO_USA || exinfo$ExtractSoilData #--------------------------------------------------------------------------------------------------# #------EXTRACT ELEVATION------ if (exinfo$ExtractElevation_NED_USA || exinfo$ExtractElevation_HWSD_Global) { + stopifnot(require("raster")) + #allow for multiple sources if (extract_determine_database == "SWRunInformation" && "Elevation_source" %in% colnames(SWRunInformation)) { sites_elevation_source <- SWRunInformation$Elevation_source[runIDs_sites] @@ -3463,6 +3771,7 @@ if (exinfo$ExtractSkyDataFromNOAAClimateAtlas_USA || exinfo$ExtractSkyDataFromNC if (exinfo$ExtractSkyDataFromNOAAClimateAtlas_USA) { if (!be.quiet) print(paste("Started 'ExtractSkyDataFromNOAAClimateAtlas_USA' at", Sys.time())) + stopifnot(require("rgdal")) do_extract[[1]] <- has_incompletedata(monthlyclim) | is.na(sites_monthlyclim_source) | sites_monthlyclim_source == "ClimateNormals_NCDC2005_USA" @@ -3667,8 +3976,7 @@ if (exinfo$ExtractSkyDataFromNOAAClimateAtlas_USA || exinfo$ExtractSkyDataFromNC parallel_backend = parallel_backend, cl = if (identical(parallel_backend, "snow")) cl else NULL, rm_mc_files = TRUE, - continueAfterAbort = continueAfterAbort), - silent = TRUE) + continueAfterAbort = continueAfterAbort)) if (inherits(temp, "try-error")) stop(temp) #match weather 
folder names in case of missing extractions diff --git a/R/2_SWSF_p4of5_Code_v51.R b/R/2_SWSF_p4of5_Code_v51.R index 7d3b5d81..cf6d12ed 100644 --- a/R/2_SWSF_p4of5_Code_v51.R +++ b/R/2_SWSF_p4of5_Code_v51.R @@ -62,9 +62,9 @@ ftemp <- file.path(dir.out, timerfile) times <- if (!file.exists(ftemp) || !continueAfterAbort) { cat("0,NA", file = ftemp, sep = "\n") } else { - swsf_read_csv(file = ftemp, header = FALSE, colClasses = c("NULL", "numeric"))[[1]] + swsf_read_csv(file = ftemp, header = FALSE, colClasses = c("integer", "NULL"), skip = 1)[[1]] } -runIDs_done <- if (length(times) > 1) sort(times[-1]) else NULL +runIDs_done <- if (length(times) > 0) sort(times) else NULL #timing: output for overall timing information timerfile2 <- "Timing_Simulation.csv" @@ -359,37 +359,58 @@ if (usePreProcessedInput && file.exists(file.path(dir.in, datafile.SWRWinputs_pr tr_cloud[basename(temp)] <-unlist(lapply(temp,FUN=function(x) return(swReadLines(swClear(new("swCloud")),x)))) } - if (any(create_treatments == "LookupClimatePPTScenarios")) - tr_input_climPPT <- swsf_read_csv(file.path(dir.sw.in.tr, "LookupClimatePPTScenarios", trfile.LookupClimatePPTScenarios)) - if (any(create_treatments == "LookupClimateTempScenarios")) - tr_input_climTemp <- swsf_read_csv(file.path(dir.sw.in.tr, "LookupClimateTempScenarios", trfile.LookupClimateTempScenarios)) - if (any(create_treatments == "LookupShiftedPPTScenarios")) - tr_input_shiftedPPT <- swsf_read_csv(file.path(dir.sw.in.tr, "LookupShiftedPPTScenarios", trfile.LookupShiftedPPTScenarios), row.names=1) - if (any(create_treatments == "LookupEvapCoeffFromTable")) - tr_input_EvapCoeff <- swsf_read_csv(file.path(dir.sw.in.tr, "LookupEvapCoeffFromTable", trfile.LookupEvapCoeffFromTable), row.names=1) - if (any(create_treatments == "LookupTranspCoeffFromTable_Grass", create_treatments == "LookupTranspCoeffFromTable_Shrub", create_treatments == "LookupTranspCoeffFromTable_Tree", create_treatments == "LookupTranspCoeffFromTable_Forb", 
create_treatments == "AdjRootProfile")) { - tr_input_TranspCoeff_Code <- tryCatch(read.csv(temp <- file.path(dir.sw.in.tr, "LookupTranspCoeffFromTable", trfile.LookupTranspCoeffFromTable), nrows=2), error=function(e) { print("LookupTranspCoeffFromTable.csv: Bad Path"); print(e)}) + if (any(create_treatments == "LookupClimatePPTScenarios")) { + temp <- file.path(dir.sw.in.tr, "LookupClimatePPTScenarios", trfile.LookupClimatePPTScenarios) + tr_input_climPPT <- swsf_read_csv(temp) + } + if (any(create_treatments == "LookupClimateTempScenarios")) { + temp <- file.path(dir.sw.in.tr, "LookupClimateTempScenarios", trfile.LookupClimateTempScenarios) + tr_input_climTemp <- swsf_read_csv(temp) + } + if (any(create_treatments == "LookupShiftedPPTScenarios")) { + temp <- file.path(dir.sw.in.tr, "LookupShiftedPPTScenarios", trfile.LookupShiftedPPTScenarios) + tr_input_shiftedPPT <- swsf_read_csv(temp, row.names = 1) + } + if (any(create_treatments == "LookupEvapCoeffFromTable")) { + temp <- file.path(dir.sw.in.tr, "LookupEvapCoeffFromTable", trfile.LookupEvapCoeffFromTable) + tr_input_EvapCoeff <- swsf_read_csv(temp, row.names = 1) + } + + if (any(grepl("LookupTranspCoeffFromTable_", create_treatments), + create_treatments == "AdjRootProfile")) { + temp <- file.path(dir.sw.in.tr, "LookupTranspCoeffFromTable", trfile.LookupTranspCoeffFromTable) + tr_input_TranspCoeff_Code <- tryCatch(read.csv(temp, nrows = 2, stringsAsFactors = FALSE), error = print) tr_input_TranspCoeff_Code <- tr_input_TranspCoeff_Code[-2,] - tr_input_TranspCoeff <- read.csv(temp, skip=2) + tr_input_TranspCoeff <- read.csv(temp, skip = 2, stringsAsFactors = FALSE) colnames(tr_input_TranspCoeff) <- colnames(tr_input_TranspCoeff_Code) } - if(any(create_treatments == "LookupTranspRegionsFromTable")) tr_input_TranspRegions <- read.csv( file.path(dir.sw.in.tr, "LookupTranspRegionsFromTable", trfile.LookupTranspRegionsFromTable), row.names=1) - if(any(create_treatments == "LookupSnowDensityFromTable")) 
tr_input_SnowD <- read.csv( file.path(dir.sw.in.tr, "LookupSnowDensityFromTable", trfile.LookupSnowDensityFromTable), row.names=1) - if(any(create_treatments == "AdjMonthlyBioMass_Temperature")) tr_VegetationComposition <- read.csv(file.path(dir.sw.in.tr, "LookupVegetationComposition", trfile.LookupVegetationComposition), skip=1, row.names=1) + + if (any(create_treatments == "LookupTranspRegionsFromTable")) { + temp <- file.path(dir.sw.in.tr, "LookupTranspRegionsFromTable", trfile.LookupTranspRegionsFromTable) + tr_input_TranspRegions <- read.csv(temp, row.names = 1, stringsAsFactors = FALSE) + } + if (any(create_treatments == "LookupSnowDensityFromTable")) { + temp <- file.path(dir.sw.in.tr, "LookupSnowDensityFromTable", trfile.LookupSnowDensityFromTable) + tr_input_SnowD <- read.csv(temp, row.names = 1, stringsAsFactors = FALSE) + } + if (any(create_treatments == "AdjMonthlyBioMass_Temperature")) { + temp <- file.path(dir.sw.in.tr, "LookupVegetationComposition", trfile.LookupVegetationComposition) + tr_VegetationComposition <- read.csv(temp, skip = 1, row.names = 1, stringsAsFactors = FALSE) + } #-import regeneration data param.species_regeneration <- list() - if(any(simulation_timescales=="daily") & aon$dailyRegeneration_GISSM) { + if(aon$dailyRegeneration_GISSM) { list.species_regeneration <- list.files(dir.sw.in.reg, pattern=".csv") no.species_regeneration <- length(list.species_regeneration) if(no.species_regeneration > 0){ - f.temp <- read.csv(file.path(dir.sw.in.reg, list.species_regeneration[1])) + f.temp <- read.csv(file.path(dir.sw.in.reg, list.species_regeneration[1]), stringsAsFactors = FALSE) param.species_regeneration <- matrix(NA, ncol=no.species_regeneration, nrow=nrow(f.temp)) colnames(param.species_regeneration) <- sub(".csv", "", list.species_regeneration) rownames(param.species_regeneration) <- f.temp[, 1] param.species_regeneration[, 1] <- f.temp[, 2] if(no.species_regeneration > 1) for(f in 2:no.species_regeneration){ - f.temp <- 
read.csv(file.path(dir.sw.in.reg, list.species_regeneration[f])) + f.temp <- read.csv(file.path(dir.sw.in.reg, list.species_regeneration[f]), stringsAsFactors = FALSE) param.species_regeneration[, f] <- f.temp[, 2] } rm(f.temp) @@ -447,14 +468,12 @@ if(length(create_treatments) > 0) { } daily_no <- length(output_aggregate_daily) -if(any(simulation_timescales=="daily")){ - if(any(output_aggregate_daily == "SWAbulk") & length(SWPcrit_MPa) > 0){ - output_aggregate_daily <- output_aggregate_daily[-which(output_aggregate_daily == "SWAbulk")] - for(icrit in seq(along=SWPcrit_MPa)){ - output_aggregate_daily <- c(output_aggregate_daily, paste("SWAbulkatSWPcrit", abs(round(-1000*SWPcrit_MPa[icrit], 0)), "kPa", sep="")) - } - daily_no <- length(output_aggregate_daily) - } +if(any(output_aggregate_daily == "SWAbulk") & length(SWPcrit_MPa) > 0){ + output_aggregate_daily <- output_aggregate_daily[-which(output_aggregate_daily == "SWAbulk")] + for(icrit in seq(along=SWPcrit_MPa)){ + output_aggregate_daily <- c(output_aggregate_daily, paste("SWAbulkatSWPcrit", abs(round(-1000*SWPcrit_MPa[icrit], 0)), "kPa", sep="")) + } + daily_no <- length(output_aggregate_daily) # if(daily_lyr_agg[["do"]]){ # aggLs_no <- 2 + ifelse(is.null(daily_lyr_agg[["third_cm"]]), 1, ifelse(!is.na(daily_lyr_agg[["third_cm"]]), 1, 0)) + ifelse(is.null(daily_lyr_agg[["fourth_cm"]]), 1, ifelse(!is.na(daily_lyr_agg[["fourth_cm"]]), 1, 0)) @@ -610,11 +629,6 @@ if (extract_determine_database == "SWRunInformation" && "dailyweather_source" %i weather.digits <- 2 -lwf_cond1 <- sw_input_treatments_use["LookupWeatherFolder"] && sum(is.na(sw_input_treatments$LookupWeatherFolder[runIDs_sites])) == 0 -lwf_cond2 <- (sum(is.na(SWRunInformation$WeatherFolder[runIDs_sites])) == 0) && !any(exinfo$GriddedDailyWeatherFromMaurer2002_NorthAmerica, exinfo$GriddedDailyWeatherFromDayMet_USA, exinfo$GriddedDailyWeatherFromNRCan_10km_Canada, exinfo$GriddedDailyWeatherFromNCEPCFSR_Global) -lwf_cond3 <- 
sw_input_experimentals_use["LookupWeatherFolder"] && sum(is.na(sw_input_experimentals$LookupWeatherFolder)) == 0 -lwf_cond4 <- any(create_treatments == "LookupWeatherFolder") - if(exinfo$GriddedDailyWeatherFromMaurer2002_NorthAmerica){ #extract daily weather information for the grid cell coded by latitude/longitude for each simulation run @@ -650,7 +664,15 @@ if(exinfo$GriddedDailyWeatherFromNRCan_10km_Canada && createAndPopulateWeatherDa -if(do_weather_source){ +if (do_weather_source) { + lwf_cond1 <- sw_input_treatments_use["LookupWeatherFolder"] && + !anyNA(sw_input_treatments$LookupWeatherFolder[runIDs_sites]) + lwf_cond2 <- !anyNA(SWRunInformation$WeatherFolder[runIDs_sites]) && + !any(grepl("GriddedDailyWeatherFrom", names(exinfo)[unlist(exinfo)])) + lwf_cond3 <- sw_input_experimentals_use["LookupWeatherFolder"] && + !anyNA(sw_input_experimentals$LookupWeatherFolder) + lwf_cond4 <- any(create_treatments == "LookupWeatherFolder") + #Functions to determine sources of daily weather; they write to global 'sites_dailyweather_source' and 'sites_dailyweather_names', i.e., the last entry is the one that will be used dw_LookupWeatherFolder <- function(sites_dailyweather_source) { if (any(lwf_cond1, lwf_cond2, lwf_cond3, lwf_cond4)) { @@ -807,7 +829,7 @@ if(anyNA(sites_dailyweather_source)){ stop("There are sites without daily weather. 
Provide data for all runs") } -if(exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA || exinfo$ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global || exinfo$ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA) { +if (exinfo$ExtractClimateChangeScenarios) { getScenarioWeatherDataFromDatabase <- TRUE getCurrentWeatherDataFromDatabase <- TRUE } @@ -852,17 +874,15 @@ if (getCurrentWeatherDataFromDatabase || getScenarioWeatherDataFromDatabase) { if(!be.quiet) print(paste("SWSF sets up the database: ended after", round(difftime(Sys.time(), t1, units="secs"), 2), "s")) #------simulation timing -output_timescales_shortest <- ifelse(any(simulation_timescales=="daily"), 1, ifelse(any(simulation_timescales=="weekly"), 2, ifelse(any(simulation_timescales=="monthly"), 3, 4))) - simTime <- simTiming(startyr, simstartyr, endyr) simTime_ForEachUsedTimeUnit_North <- simTiming_ForEachUsedTimeUnit(simTime, - sim_tscales = simulation_timescales, + sim_tscales = c("daily", "monthly", "yearly"), latitude = 90, account_NorthSouth = accountNSHemispheres_agg) if(accountNSHemispheres_agg){ simTime_ForEachUsedTimeUnit_South <- simTiming_ForEachUsedTimeUnit(simTime, - sim_tscales = simulation_timescales, + sim_tscales = c("daily", "monthly", "yearly"), latitude = -90, account_NorthSouth = accountNSHemispheres_agg) @@ -920,130 +940,9 @@ if (do_check_include) { #--------------------------------------------------------------------------------------------------# -#--------------------------------------------------------------------------------------------------# -#------------------------OBTAIN INFORMATION FROM TABLES PRIOR TO SIMULATION RUNS TO CREATE THEM - -#------obtain information prior to simulation runs -if (any(actions == "create")) { - if (!be.quiet) - print(paste("SWSF obtains information prior to simulation runs: started at", t1 
<- Sys.time())) - - if (any(create_treatments %in% c("LookupEvapCoeffFromTable", - "LookupTranspRegionsFromTable", - "LookupSnowDensityFromTable"))) { - - do_prior_lookup <- list( - LookupEvapCoeffFromTable = list( - flag = "LookupEvapCoeffFromTable", - pattern = "EvapCoeff", - tr_input = tr_input_EvapCoeff, - sw_input_use = "sw_input_soils_use", - sw_input = "sw_input_soils", - nvars = SoilLayer_MaxNo, - do_fill = FALSE, - datafile = file.path(dir.sw.dat, datafile.soils)), - - LookupTranspRegionsFromTable = list( - flag = "LookupTranspRegionsFromTable", - pattern = "TranspRegion", - tr_input = tr_input_TranspRegions, - sw_input_use = "sw_input_soils_use", - sw_input = "sw_input_soils", - nvars = SoilLayer_MaxNo, - do_fill = FALSE, - datafile = file.path(dir.sw.dat, datafile.soils)), - - LookupSnowDensityFromTable = list( - flag = "LookupSnowDensityFromTable", - pattern = "(snowd)|(SnowD_Hemisphere)", - tr_input = tr_input_SnowD, - sw_input_use = "sw_input_cloud_use", - sw_input = "sw_input_cloud", - nvars = 12 + 1, - do_fill = TRUE, - fill_pattern = "snowd", - fill_value = 76, # 76 kg/m3 = median of medians over 6 sites in Colorado and Wyoming: Judson, A. & Doesken, N. (2000) Density of Freshly Fallen Snow in the Central Rocky Mountains. Bulletin of the American Meteorological Society, 81, 1577-1587. 
- datafile = file.path(dir.sw.dat, datafile.cloud)) - ) - - done_prior <- rep(FALSE, length(do_prior_lookup)) - names(done_prior) <- names(do_prior_lookup) - - for (pc in do_prior_lookup) { - if (any(create_treatments == pc$flag)) { - #lookup values per category for each simulation run and copy values to datafile - temp <- sw_input_experimentals_use[pc$flag] - if (!temp || (temp && (length(unique(sw_input_experimentals[, pc$flag])) == 1L))) { - # Lookup prior to do_OneSite() only if option is off in sw_input_experimentals or constant - - if (continueAfterAbort) { - # Determine whether lookup already carried out and stored to file - sw_input_use <- get(pc$sw_input_use) - - icols <- grep(pc$pattern, names(sw_input_use)) - icols <- icols[sw_input_use[icols]] - temp <- get(pc$sw_input)[, icols, drop = FALSE] - - if (all(!apply(is.na(temp), 2, all))) { - # if no layer has only NAs for which the _use flag is on, then consider as completed - done_prior[pc$flag] <- TRUE - next - } - } - - if (!be.quiet) - print(paste(Sys.time(), ": performing", shQuote(pc$flag))) - - trtype <- if (sw_input_experimentals_use[pc$flag]) { - unique(sw_input_experimentals[, pc$flag]) - } else { - sw_input_treatments[, pc$flag] - } - - if (anyNA(trtype)) - stop("ERROR: ", pc$flag, " column cannot have any NAs.") - if (!all(unique(trtype) %in% rownames(pc$tr_input))) - stop("ERROR: ", pc$flag, " column values do not match up with trfile.", pc$flag, " row names.") - - tempdat <- try(get.LookupFromTable( - pattern = pc$pattern, - trtype = trtype, - tr_input = pc$tr_input, - sw_input_use = get(pc$sw_input_use), - sw_input = get(pc$sw_input), - nvars = pc$nvars)) - - done_prior[pc$flag] <- !inherits(tempdat, "try-error") - if (done_prior[pc$flag]) { - if (!is.null(pc$do_fill) && pc$do_fill) - tempdat <- fill_empty(tempdat, pattern = pc$fill_pattern, fill = pc$fill_value) - - assign(pc$sw_input_use, tempdat$sw_input_use, envir = .GlobalEnv) - assign(pc$sw_input, tempdat$sw_input, envir = .GlobalEnv) - 
- #write data to datafile - write.csv(reconstitute_inputfile(tempdat$sw_input_use, tempdat$sw_input), - file = pc$datafile, row.names = FALSE) - unlink(file.path(dir.in, datafile.SWRWinputs_preprocessed)) - } - - } else { - done_prior[pc$flag] <- FALSE - } - } - } - - rm(do_prior_lookup) - } - - if(!be.quiet) print(paste("SWSF obtains information prior to simulation runs: ended after", round(difftime(Sys.time(), t1, units="secs"), 2), "s")) -} - - #--------------------------------------------------------------------------------------------------# #------------------------CALCULATIONS PRIOR TO SIMULATION RUNS TO CREATE THEM -#------flags temp <- matrix(data=do.PriorCalculations, ncol=2, nrow=length(do.PriorCalculations)/2, byrow=TRUE) pcalcs <- lapply(temp[, 2], function(x) as.logical(as.numeric(x))) names(pcalcs) <- temp[, 1] @@ -1064,6 +963,8 @@ if (any(unlist(pcalcs))) { if (!be.quiet) print(paste("SWSF makes calculations prior to simulation runs: started at", t1 <- Sys.time())) + runIDs_adjust <- seq_len(runsN_master) # if not all, then runIDs_sites + if (pcalcs$ExtendSoilDatafileToRequestedSoilLayers) { if (!be.quiet) print(paste(Sys.time(), "'InterpolateSoilDatafileToRequestedSoilLayers' of", paste0(requested_soil_layers, collapse = ", "), "cm")) @@ -1083,13 +984,13 @@ if (any(unlist(pcalcs))) { ids_depth2 <- unique(sapply(ids_depth, function(x) x[length(x)])) use_layers <- paste0("depth_", ids_depth2) - layers_depth <- round(as.matrix(sw_input_soillayers[runIDs_sites, use_layers, drop = FALSE])) + layers_depth <- round(as.matrix(sw_input_soillayers[runIDs_adjust, use_layers, drop = FALSE])) i_nodata <- apply(is.na(layers_depth), 1, all) if (any(i_nodata)) { layers_depth <- layers_depth[!i_nodata, ] - runIDs_sites_ws <- runIDs_sites[!i_nodata] + runIDs_adjust_ws <- runIDs_adjust[!i_nodata] } else { - runIDs_sites_ws <- runIDs_sites + runIDs_adjust_ws <- runIDs_adjust } i_nodata <- apply(is.na(layers_depth), 2, all) if (any(i_nodata)) @@ -1102,7 +1003,7 @@ if 
(any(unlist(pcalcs))) { if (length(layer_sets) > 0) { has_changed <- FALSE sw_input_soils_data <- lapply(var_layers, function(x) - as.matrix(sw_input_soils[runIDs_sites_ws, grep(x, names(sw_input_soils))[ids_layers]])) + as.matrix(sw_input_soils[runIDs_adjust_ws, grep(x, names(sw_input_soils))[ids_layers]])) for (ils in seq_along(layer_sets)) { il_set <- avail_sl_ids == layer_sets[ils] @@ -1131,12 +1032,12 @@ if (any(unlist(pcalcs))) { lyrs <- seq_along(ldset) for (iv in seq_along(var_layers)) { i.temp <- grep(var_layers[iv], names(sw_input_soils_use))[lyrs] - sw_input_soils[runIDs_sites_ws[il_set], i.temp] <- + sw_input_soils[runIDs_adjust_ws[il_set], i.temp] <- round(sw_input_soils_data2[[iv]][, lyrs], if (var_layers[iv] %in% sl_vars_sub) 4L else 2L) sw_input_soils_use[i.temp] <- TRUE } - sw_input_soillayers[runIDs_sites_ws[il_set], + sw_input_soillayers[runIDs_adjust_ws[il_set], grep("depth_", names(sw_input_soillayers))[lyrs]] <- matrix(ldset, nrow = sum(il_set), ncol = length(ldset), byrow = TRUE) has_changed <- TRUE } @@ -1178,12 +1079,12 @@ if (any(unlist(pcalcs))) { temp <- icol_bsE[use_layers] icols <- temp[sw_input_soils_use[temp]] if (length(icols) > 0L) { - do_calc <- !all(rowSums(sw_input_soils[runIDs_sites, icols, drop = FALSE], na.rm = TRUE) > 0) + do_calc <- !all(rowSums(sw_input_soils[runIDs_adjust, icols, drop = FALSE], na.rm = TRUE) > 0) } } if (do_calc) { - layers_depth <- as.matrix(sw_input_soillayers[runIDs_sites, grep("depth_L", names(sw_input_soillayers))[use_layers], drop = FALSE]) + layers_depth <- as.matrix(sw_input_soillayers[runIDs_adjust, grep("depth_L", names(sw_input_soillayers))[use_layers], drop = FALSE]) depth_min_bs_evap <- min(layers_depth[, 1]) stopifnot(na.exclude(depth_min_bs_evap < depth_max_bs_evap)) @@ -1201,8 +1102,8 @@ if (any(unlist(pcalcs))) { ldepth_max_bs_evap <- rowSums(lyrs_max_bs_evap) #TODO: add influence of gravel - sand <- sw_input_soils[runIDs_sites, icol_sand, drop = FALSE] - clay <- 
sw_input_soils[runIDs_sites, icol_clay, drop = FALSE] + sand <- sw_input_soils[runIDs_adjust, icol_sand, drop = FALSE] + clay <- sw_input_soils[runIDs_adjust, icol_clay, drop = FALSE] sand_mean <- rowSums(lyrs_max_bs_evap * sand, na.rm = TRUE) / ldepth_max_bs_evap clay_mean <- rowSums(lyrs_max_bs_evap * clay, na.rm = TRUE) / ldepth_max_bs_evap @@ -1230,10 +1131,10 @@ if (any(unlist(pcalcs))) { icols_bse_notused <- icol_bsE[-icol] sw_input_soils_use[icols_bsE_used] <- TRUE - sw_input_soils[runIDs_sites, icols_bsE_used] <- round(coeff_bs_evap[, icol], 4) + sw_input_soils[runIDs_adjust, icols_bsE_used] <- round(coeff_bs_evap[, icol], 4) sw_input_soils_use[icols_bse_notused] <- FALSE - sw_input_soils[runIDs_sites, icols_bse_notused] <- 0 + sw_input_soils[runIDs_adjust, icols_bse_notused] <- 0 #write data to datafile.soils write.csv(reconstitute_inputfile(sw_input_soils_use, sw_input_soils), @@ -1263,13 +1164,134 @@ if (any(unlist(pcalcs))) { ld <- seq_len(SoilLayer_MaxNo) use.layers <- which(sw_input_soils_use[paste0("Sand_L", ld)]) index.soilTemp <- paste0("SoilTemp_L", ld)[use.layers] - soilTemp <- sw_input_soils[runIDs_sites, index.soilTemp, drop = FALSE] + soilTemp <- sw_input_soils[runIDs_adjust, index.soilTemp, drop = FALSE] sw_input_soils_use[index.soilTemp] <- TRUE } if(!be.quiet) print(paste("SWSF makes calculations prior to simulation runs: ended after", round(difftime(Sys.time(), t1, units="secs"), 2), "s")) } + +#--------------------------------------------------------------------------------------------------# +#------------------------OBTAIN INFORMATION FROM TABLES PRIOR TO SIMULATION RUNS TO CREATE THEM + +if (any(actions == "create")) { + if (!be.quiet) + print(paste("SWSF obtains information prior to simulation runs: started at", t1 <- Sys.time())) + + if (any(create_treatments %in% c("LookupEvapCoeffFromTable", + "LookupTranspRegionsFromTable", + "LookupSnowDensityFromTable"))) { + + do_prior_lookup <- list( + LookupEvapCoeffFromTable = list( + flag = 
"LookupEvapCoeffFromTable", + pattern = "EvapCoeff", + tr_input = tr_input_EvapCoeff, + sw_input_use = "sw_input_soils_use", + sw_input = "sw_input_soils", + nvars = SoilLayer_MaxNo, + do_fill = FALSE, + datafile = file.path(dir.sw.dat, datafile.soils)), + + LookupTranspRegionsFromTable = list( + flag = "LookupTranspRegionsFromTable", + pattern = "TranspRegion", + tr_input = tr_input_TranspRegions, + sw_input_use = "sw_input_soils_use", + sw_input = "sw_input_soils", + nvars = SoilLayer_MaxNo, + do_fill = FALSE, + datafile = file.path(dir.sw.dat, datafile.soils)), + + LookupSnowDensityFromTable = list( + flag = "LookupSnowDensityFromTable", + pattern = "(snowd)|(SnowD_Hemisphere)", + tr_input = tr_input_SnowD, + sw_input_use = "sw_input_cloud_use", + sw_input = "sw_input_cloud", + nvars = 12 + 1, + do_fill = TRUE, + fill_pattern = "snowd", + fill_value = 76, # 76 kg/m3 = median of medians over 6 sites in Colorado and Wyoming: Judson, A. & Doesken, N. (2000) Density of Freshly Fallen Snow in the Central Rocky Mountains. Bulletin of the American Meteorological Society, 81, 1577-1587. 
+ datafile = file.path(dir.sw.dat, datafile.cloud)) + ) + + done_prior <- rep(FALSE, length(do_prior_lookup)) + names(done_prior) <- names(do_prior_lookup) + + for (pc in do_prior_lookup) { + if (any(create_treatments == pc$flag)) { + #lookup values per category for each simulation run and copy values to datafile + temp <- sw_input_experimentals_use[pc$flag] + if (!temp || (temp && (length(unique(sw_input_experimentals[, pc$flag])) == 1L))) { + # Lookup prior to do_OneSite() only if option is off in sw_input_experimentals or constant + + if (continueAfterAbort) { + # Determine whether lookup already carried out and stored to file + sw_input_use <- get(pc$sw_input_use) + + icols <- grep(pc$pattern, names(sw_input_use)) + icols <- icols[sw_input_use[icols]] + temp <- get(pc$sw_input)[, icols, drop = FALSE] + + if (all(!apply(is.na(temp), 2, all))) { + # if no layer has only NAs for which the _use flag is on, then consider as completed + done_prior[pc$flag] <- TRUE + next + } + } + + if (!be.quiet) + print(paste(Sys.time(), ": performing", shQuote(pc$flag))) + + trtype <- if (sw_input_experimentals_use[pc$flag]) { + unique(sw_input_experimentals[, pc$flag]) + } else { + sw_input_treatments[, pc$flag] + } + + if (anyNA(trtype)) + stop("ERROR: ", pc$flag, " column cannot have any NAs.") + if (!all(unique(trtype) %in% rownames(pc$tr_input))) + stop("ERROR: ", pc$flag, " column values do not match up with trfile.", pc$flag, " row names.") + + tempdat <- try(get.LookupFromTable( + pattern = pc$pattern, + trtype = trtype, + tr_input = pc$tr_input, + sw_input_use = get(pc$sw_input_use), + sw_input = get(pc$sw_input), + nvars = pc$nvars)) + + done_prior[pc$flag] <- !inherits(tempdat, "try-error") + if (done_prior[pc$flag]) { + if (!is.null(pc$do_fill) && pc$do_fill) + tempdat <- fill_empty(tempdat, pattern = pc$fill_pattern, fill = pc$fill_value) + + assign(pc$sw_input_use, tempdat$sw_input_use, envir = .GlobalEnv) + assign(pc$sw_input, tempdat$sw_input, envir = .GlobalEnv) + 
+ #write data to datafile + write.csv(reconstitute_inputfile(tempdat$sw_input_use, tempdat$sw_input), + file = pc$datafile, row.names = FALSE) + unlink(file.path(dir.in, datafile.SWRWinputs_preprocessed)) + } + + } else { + done_prior[pc$flag] <- FALSE + } + } + } + + rm(do_prior_lookup) + } + + if(!be.quiet) print(paste("SWSF obtains information prior to simulation runs: ended after", round(difftime(Sys.time(), t1, units="secs"), 2), "s")) +} + + + #--------------------------------------------------------------------------------------------------# #------------------------MAP INPUT VARIABLES (FOR QUALITY CONTROL) if (any(actions == "map_input") && length(map_vars) > 0) { @@ -1295,9 +1317,11 @@ if (any(actions == "map_input") && length(map_vars) > 0) { dir.create(dir.inmapvar <- file.path(dir.inmap, map_vars[iv]), showWarnings = FALSE) for (it1 in seq_along(iv_locs)) for (it2 in seq_along(iv_locs[[it1]])) { - dat <- as.numeric(get(names(iv_locs)[it1])[runIDs_sites, iv_locs[[it1]][it2]]) + dat <- get(names(iv_locs)[it1])[runIDs_sites, iv_locs[[it1]][it2]] + dat <- try(as.numeric(dat), silent = TRUE) # e.g., sw_input_cloud[, "SnowD_Hemisphere"] contains only strings for which as.numeric() issues a warning - if (any(is.finite(dat))) { + # this code plots only numeric maps + if (any(is.finite(dat)) && !inherits(dat, "try-error")) { names(dat) <- iv_locs[[it1]][it2] map_flag <- paste(names(iv_locs)[it1], iv_locs[[it1]][it2], sim_cells_or_points, sep = "_") @@ -1386,7 +1410,18 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer flag.icounter <- formatC(i_sim, width=counter.digitsN, format = "d", flag="0") if (debug.dump.objects) - on.exit(save(list = ls(), file = file.path(dir.prj, paste0("last.dump.do_OneSite_", i_sim, ".RData")))) + print(paste0("'last.dump.do_OneSite_", i_sim, ".RData' will be produced if 'do_OneSite' fails")) + on.exit(if (debug.dump.objects) { + op_prev <- options("warn") + options(warn = 0) + env_tosave <- new.env() + 
list2env(as.list(.GlobalEnv), envir = env_tosave) + list2env(as.list(parent.frame()), envir = env_tosave) + list2env(as.list(environment()), envir = env_tosave) + save(list = ls(envir = env_tosave), envir = env_tosave, + file = file.path(dir.prj, paste0("last.dump.do_OneSite_", i_sim, ".RData"))) + options(op_prev) + }) #-----------------------Check for experimentals if(expN > 0 && length(create_experimentals) > 0) { @@ -1412,7 +1447,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #Check what needs to be done #TODO this currently doesn't work in the database setup isdone.overallAggs <- rep(FALSE, scenario_No) - if(any(simulation_timescales=="daily") && daily_no > 0){ + if(daily_no > 0){ isdone.dailyAggs <- matrix(data=FALSE, nrow=daily_no, ncol=scenario_No) } else { isdone.dailyAggs <- TRUE @@ -1471,7 +1506,8 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer if(!any(create_treatments=="soilsin") & tasks$create == 1L) { soildepth <- i_sw_input_soillayers$SoilDepth_cm layers_depth <- na.omit(as.numeric(i_sw_input_soillayers[2 + lmax])) - if(!(length(d <- which(soildepth == layers_depth)) > 0)){ #soildepth is one of the lower layer boundaries + d <- which(soildepth == layers_depth) + if (length(d) == 0) { #soildepth is one of the lower layer boundaries d <- min(length(layers_depth), findInterval(soildepth, layers_depth)+1) #soildepth is not one of the lower layer boundaries, the next deeper layer boundary is used } } else {# needs to be read from soilsin file @@ -1510,7 +1546,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #------simulation timing needs to be adjusted simTime <- simTiming(startyr, simstartyr, endyr) simTime2 <- simTiming_ForEachUsedTimeUnit(simTime, - sim_tscales = simulation_timescales, + sim_tscales = c("daily", "monthly", "yearly"), latitude = i_SWRunInformation$Y_WGS84, account_NorthSouth = accountNSHemispheres_agg) @@ -1884,6 +1920,10 @@ 
do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer # Calculate soil layer structure, because any(create_treatments=="soilsin") and soilsin may have a different soil layer structure than the datafiles temp <- as.numeric(na.omit(unlist(i_sw_input_soillayers[paste0("depth_L", seq_len(SoilLayer_MaxNo))]))) layers_depth.datafile <- temp[temp <= as.numeric(i_sw_input_soillayers["SoilDepth_cm"])] + if (length(layers_depth.datafile) == 0) { + # this condition arises if i_sw_input_soillayers["SoilDepth_cm"] < i_sw_input_soillayers["depth_L1"] + layers_depth.datafile <- temp[1] + } if (!identical(layers_depth.datafile, soil_swdat[, "depth_cm"])) { # different soil layer structure in soilsin and datafile AND since variables are flagged in sw_input_soils_use => use only datafile values @@ -1957,7 +1997,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer # Impute missing/bad soil data from previous layer icol_excl <- which(soil_cols %in% "soilTemp_c") icols <- seq_along(soil_cols)[-icol_excl] - bad_data <- !check_soil_data(soildat[, -icol_excl]) + bad_data <- !check_soil_data(soildat[, -icol_excl, drop = FALSE]) if (any(bad_data)) for (l in ld) { lbad <- bad_data[l, ] @@ -1995,11 +2035,10 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer EVCO_done <- sum(soil_swdat[, "EvapBareSoil_frac"]) > 0 TRCO_done <- all(colSums(soil_swdat[, c("transpGrass_frac", "transpShrub_frac", - "transpTree_frac", "transpForb_frac")]) > 0) + "transpTree_frac", "transpForb_frac"), drop = FALSE]) > 0) swSoils_Layers(swRunScenariosData[[1]]) <- soil_swdat - #add transpiration regions information to siteparamin if(print.debug) print("Start of transpregion") if(sum(use_transpregion) > 0){ @@ -2038,8 +2077,8 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer swWeather_pct_SnowRunoff(swRunScenariosData[[1]]) <- i_sw_input_weather$RunOffOnPerSnowmelt_Percent 
swWeather_FirstYearHistorical(swRunScenariosData[[1]]) <- simstartyr - - swOUT_TimeStep(swRunScenariosData[[1]]) <- sapply(simulation_timescales, function(x) ifelse(x=="daily", 1, ifelse(x=="weekly", 2, ifelse(x=="monthly", 3, ifelse(x=="yearly",4,5)))) )-1 + # Set simulation_timescales fix to daily, monthly, and yearly + swOUT_TimeStep(swRunScenariosData[[1]]) <- c(daily = 0, monthly = 2, yearly = 3) #############Get Weather Data################ if (print.debug) print("Start of daily weather") @@ -2338,7 +2377,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #adjust init soil temperatures to climatic conditions use_soil_temp <- sw_input_soils_use[paste0("SoilTemp_L", ld)] if (any(use_soil_temp)) { - temp <- 1:nrow(swSoils_Layers(swRunScenariosData[[sc]])) + temp <- seq_len(nrow(swSoils_Layers(swRunScenariosData[[sc]]))) if(exists("init.soilTprofile")) { swSoils_Layers(swRunScenariosData[[sc]])[,12][use_soil_temp] <- init.soilTprofile } else { @@ -2413,18 +2452,58 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #adjust Root Profile - need composition fractions set above - if(print.debug) print("Start of AdjRootProfile") - if(any(create_treatments == "AdjRootProfile") && i_sw_input_treatments$AdjRootProfile && any(create_treatments == "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996") && i_sw_input_treatments$PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996) { - - trco_type_C3 <- ifelse(any(create_treatments == "RootProfile_C3") && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_C3), i_sw_input_treatments$RootProfile_C3, "SchenkJackson2003_PCdry_grasses") - trco_type_C4 <- ifelse(any(create_treatments == "RootProfile_C4") && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_C4), i_sw_input_treatments$RootProfile_C4, "SchenkJackson2003_PCdry_grasses") - trco_type_annuals <- ifelse(any(create_treatments == 
"RootProfile_Annuals") && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Annuals), i_sw_input_treatments$RootProfile_Annuals, "Jacksonetal1996_crops") - trco_type_shrubs <- ifelse(any(create_treatments == "RootProfile_Shrubs") && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Shrubs), i_sw_input_treatments$RootProfile_Shrubs, "SchenkJackson2003_PCdry_shrubs") - tro_type_forb <- ifelse(any(create_treatments == "RootProfile_Forbs") && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Forbs), i_sw_input_treatments$RootProfile_Forbs, "SchenkJackson2003_PCdry_forbs") - #TODO: add 'SchenkJackson2003_PCdry_forbs' to 'TranspirationCoefficients_v2.csv' - tro_type_tree <- ifelse(any(create_treatments == "LookupTranspCoeffFromTable_Tree") && is.finite(i_sw_input_treatments$LookupTranspCoeffFromTable_Tree) && any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$LookupTranspCoeffFromTable_Tree), i_sw_input_treatments$LookupTranspCoeffFromTable_Tree, "FILL") - - if(grass.fraction==0) { #if grass.fraction is 0 then Grass.trco will be 0 + if (print.debug) + print("Start of AdjRootProfile") + + if (any(create_treatments == "AdjRootProfile") && + i_sw_input_treatments$AdjRootProfile && + any(create_treatments == "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996") && + i_sw_input_treatments$PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996) { + + trco_type_C3 <- if (any(create_treatments == "RootProfile_C3") && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_C3)) { + i_sw_input_treatments$RootProfile_C3 + } else { + "SchenkJackson2003_PCdry_grasses" + } + + trco_type_C4 <- if (any(create_treatments == "RootProfile_C4") && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_C4)) { + i_sw_input_treatments$RootProfile_C4 + } else { + "SchenkJackson2003_PCdry_grasses" + } + + trco_type_annuals <- if (any(create_treatments == 
"RootProfile_Annuals") && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Annuals)) { + i_sw_input_treatments$RootProfile_Annuals + } else { + "Jacksonetal1996_crops" + } + + trco_type_shrubs <- if (any(create_treatments == "RootProfile_Shrubs") && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Shrubs)) { + i_sw_input_treatments$RootProfile_Shrubs + } else { + "SchenkJackson2003_PCdry_shrubs" + } + + tro_type_forb <- if (any(create_treatments == "RootProfile_Forbs") && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$RootProfile_Forbs)) { + i_sw_input_treatments$RootProfile_Forbs + } else { + "SchenkJackson2003_PCdry_forbs" + } + + tro_type_tree <- if (any(create_treatments == "LookupTranspCoeffFromTable_Tree") && + is.finite(i_sw_input_treatments$LookupTranspCoeffFromTable_Tree) && + any(colnames(tr_input_TranspCoeff) == i_sw_input_treatments$LookupTranspCoeffFromTable_Tree)) { + i_sw_input_treatments$LookupTranspCoeffFromTable_Tree + } else { + "FILL" + } + + if (grass.fraction == 0) { #if grass.fraction is 0 then Grass.trco will be 0 Grass.trco <- TranspCoeffByVegType( tr_input_code = tr_input_TranspCoeff_Code, tr_input_coeff = tr_input_TranspCoeff, soillayer_no = d, @@ -2795,7 +2874,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer gravel.bottom=weighted.mean(gravel[bottomL], layers_width[bottomL]) ) - if(any(simulation_timescales=="daily") && daily_no > 0){ + if(daily_no > 0){ textureDAgg <- list( gravel=sapply(1:aggLs_no, FUN=function(x) weighted.mean(gravel[aggLs[[x]]], layers_width[aggLs[[x]]])), sand=sapply(1:aggLs_no, FUN=function(x) weighted.mean(sand[aggLs[[x]]], layers_width[aggLs[[x]]])), clay=sapply(1:aggLs_no, FUN=function(x) weighted.mean(clay[aggLs[[x]]], layers_width[aggLs[[x]]])) @@ -2914,11 +2993,11 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer resAgg[nv:(nv+11), is_central] <- 
swProd_MonProd_shrub(swRunScenariosData[[sc]])[,2]*swProd_MonProd_shrub(swRunScenariosData[[sc]])[,3] nv <- nv+12 - resMeans[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,1] + resAgg[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,1] nv <- nv+12 - resMeans[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,2] + resAgg[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,2] nv <- nv+12 - resMeans[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,2]*swProd_MonProd_tree(swRunScenariosData[[sc]])[,3] + resAgg[nv:(nv+11), is_central] <- swProd_MonProd_tree(swRunScenariosData[[sc]])[,2]*swProd_MonProd_tree(swRunScenariosData[[sc]])[,3] nv <- nv+12 resAgg[nv:(nv+11), is_central] <- swProd_MonProd_forb(swRunScenariosData[[sc]])[,1] @@ -2948,7 +3027,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+2 } #4 - if(any(simulation_timescales=="monthly") && aon$input_Phenology) { + if(aon$input_Phenology) { if(print.debug) print("Aggregation of input_Phenology") if(!exists("temp.mo")) temp.mo <- get_Temp_mo(sc, runData, simTime) @@ -2968,7 +3047,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #5 if(aon$input_TranspirationCoeff){ if(print.debug) print("Aggregation of input_TranspirationCoeff") - Tcoeff <- swSoils_Layers(swRunScenariosData[[1]])[, 5:8] + Tcoeff <- swSoils_Layers(swRunScenariosData[[1]])[, 5:8, drop = FALSE] if(is.null(dim(Tcoeff))) Tcoeff <- matrix(Tcoeff, nrow=1) TaggLs <- sapply(aggLs, FUN=function(l) apply(Tcoeff[l,, drop=FALSE], 2, sum)) @@ -3000,7 +3079,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Climate and weather #7 - if(any(simulation_timescales=="yearly") & aon$yearlyTemp){ + if(aon$yearlyTemp){ if(print.debug) print("Aggregation of yearlyTemp") if(!exists("temp.yr")) temp.yr <- get_Temp_yr(sc, 
runData, simTime) @@ -3008,7 +3087,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+1 } #8 - if(any(simulation_timescales=="yearly") & aon$yearlyPPT){ + if(aon$yearlyPPT){ if(print.debug) print("Aggregation of yearlyPPT") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) @@ -3017,7 +3096,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+2 } #9 - if(any(simulation_timescales=="daily") & any(simulation_timescales=="yearly") & aon$dailySnowpack){ + if(aon$dailySnowpack){ if(print.debug) print("Aggregation of dailySnowpack") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) if(!exists("prcp.dy")) prcp.dy <- get_PPT_dy(sc, runData, simTime) @@ -3034,7 +3113,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(rainOnSnow) } #10 - if(any(simulation_timescales=="daily") & aon$dailySnowpack){#daily snowpack: accountNSHemispheres_agg + if(aon$dailySnowpack){#daily snowpack: accountNSHemispheres_agg if(print.debug) print("Aggregation of dailySnowpack2") if(!exists("SWE.dy")) SWE.dy <- get_SWE_dy(sc, runData, simTime) @@ -3081,7 +3160,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+5 } #11 - if(any(simulation_timescales=="daily") & aon$dailyFrostInSnowfreePeriod){ + if(aon$dailyFrostInSnowfreePeriod){ if(print.debug) print("Aggregation of dailyFrostInSnowfreePeriod") if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) if(!exists("SWE.dy")) SWE.dy <- get_SWE_dy(sc, runData, simTime) @@ -3097,7 +3176,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(frostWithoutSnow) } #12 - if(any(simulation_timescales=="daily") & aon$dailyHotDays){ + if(aon$dailyHotDays){ if(print.debug) print("Aggregation of dailyHotDays") if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -3120,7 +3199,7 @@ do_OneSite <- 
function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(HotDays, dailyExcess) } #12b - if(any(simulation_timescales=="daily") & aon$dailyWarmDays){ + if(aon$dailyWarmDays){ if(print.debug) print("Aggregation of dailyWarmDays") if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -3144,7 +3223,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #13 - if(any(simulation_timescales=="daily") & aon$dailyPrecipitationEventSizeDistribution){ #daily weather frequency distributions + if(aon$dailyPrecipitationEventSizeDistribution){ #daily weather frequency distributions if(print.debug) print("Aggregation of dailyPrecipitationEventSizeDistribution") if(!exists("prcp.dy")) prcp.dy <- get_PPT_dy(sc, runData, simTime) @@ -3161,7 +3240,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(ppt_sizes) } #15 - if(any(simulation_timescales=="yearly") & aon$yearlyPET){ + if(aon$yearlyPET){ if(print.debug) print("Aggregation of yearlyPET") if(!exists("PET.yr")) PET.yr <- get_PET_yr(sc, runData, simTime) @@ -3170,7 +3249,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #16 #correl monthly swp (top and bottom) vs. pet and ppt vs. temp, use product moment correlation coefficient {eqn. 
11.6, \Sala, 1997 #45} - if(any(simulation_timescales=="monthly") & aon$monthlySeasonalityIndices){ + if(aon$monthlySeasonalityIndices){ if(print.debug) print("Aggregation of monthlySeasonalityIndices") if(!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(sc, sw_vwcmatric, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) @@ -3196,7 +3275,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Climatic dryness #17 - if(any(simulation_timescales=="yearly") & any(simulation_timescales=="monthly") & aon$yearlymonthlyTemperateDrylandIndices){ + if(aon$yearlymonthlyTemperateDrylandIndices){ if(print.debug) print("Aggregation of yearlymonthlyTemperateDrylandIndices") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) if(!exists("PET.yr")) PET.yr <- get_PET_yr(sc, runData, simTime) @@ -3217,7 +3296,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(di.ts, di.normals) } #18 - if(any(simulation_timescales=="yearly") & aon$yearlyDryWetPeriods){ + if(aon$yearlyDryWetPeriods){ if(print.debug) print("Aggregation of yearlyDryWetPeriods: NOTE: aggregation across spells over multiple years instead of across values for each years") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) @@ -3232,7 +3311,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(temp.rle) } #19 - if(any(simulation_timescales=="daily") & aon$dailyWeatherGeneratorCharacteristics){#daily response to weather generator treatments + if(aon$dailyWeatherGeneratorCharacteristics){#daily response to weather generator treatments if(print.debug) print("Aggregation of dailyWeatherGeneratorCharacteristics") if(!exists("prcp.dy")) prcp.dy <- get_PPT_dy(sc, runData, simTime) 
if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -3256,7 +3335,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(dws, dds, tv) } #20 - if(any(simulation_timescales=="daily") & aon$dailyPrecipitationFreeEventDistribution){ #daily weather frequency distributions + if(aon$dailyPrecipitationFreeEventDistribution){ #daily weather frequency distributions if(print.debug) print("Aggregation of dailyPrecipitationFreeEventDistribution") if(!exists("prcp.dy")) prcp.dy <- get_PPT_dy(sc, runData, simTime) @@ -3273,7 +3352,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(ppt_free) } #21 - if(any(simulation_timescales=="monthly") & aon$monthlySPEIEvents){ + if(aon$monthlySPEIEvents){ if(print.debug) print("Aggregation of monthlySPEIEvents: NOTE: aggregation across spells over multiple years instead of across values for each years") #standardized precipitation-evapotranspiration index, SPEI: Vicente-Serrano, S.M., Beguer, S., Lorenzo-Lacruz, J., Camarero, J.s.J., Lopez-Moreno, J.I., Azorin-Molina, C., Revuelto, J.s., Morn-Tejeda, E. & Sanchez-Lorenzo, A. (2012) Performance of Drought Indices for Ecological, Agricultural, and Hydrological Applications. Earth Interactions, 16, 1-27. if(!exists("PET.mo")) PET.mo <- get_PET_mo(sc, runData, simTime) @@ -3312,7 +3391,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Climatic control #22 - if(any(simulation_timescales=="monthly") & aon$monthlyPlantGrowthControls){ #Nemani RR, Keeling CD, Hashimoto H et al. (2003) Climate-Driven Increases in Global Terrestrial Net Primary Production from 1982 to 1999. Science, 300, 1560-1563. + if(aon$monthlyPlantGrowthControls){ #Nemani RR, Keeling CD, Hashimoto H et al. (2003) Climate-Driven Increases in Global Terrestrial Net Primary Production from 1982 to 1999. Science, 300, 1560-1563. 
if(print.debug) print("Aggregation of monthlyPlantGrowthControls") if(!exists("temp.mo")) temp.mo <- get_Temp_mo(sc, runData, simTime) if(!exists("PET.mo")) PET.mo <- get_PET_mo(sc, runData, simTime) @@ -3347,7 +3426,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(DayNumber_ForEachUsedMonth, DayNumber_ForEachUsedYear, control_temp, control_water, control_radiation, aridity, temp, cloudiness) } #23 - if(any(simulation_timescales=="daily") & aon$dailyC4_TempVar){ + if(aon$dailyC4_TempVar){ # Variables to estimate percent C4 species in North America: Teeri JA, Stowe LG (1976) Climatic patterns and the distribution of C4 grasses in North America. Oecologia, 23, 1-12. if(print.debug) print("Aggregation of dailyC4_TempVar") if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -3361,7 +3440,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+3 } #24 - if(any(simulation_timescales=="daily") & aon$dailyDegreeDays){ #Degree days based on daily temp + if(aon$dailyDegreeDays){ #Degree days based on daily temp if(print.debug) print("Aggregation of dailyDegreeDays") if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -3378,7 +3457,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Yearly water balance #27.0 - if(any(simulation_timescales=="yearly") & aon$yearlyAET){ + if(aon$yearlyAET){ if(print.debug) print("Aggregation of yearlyAET") if(!exists("AET.yr")) AET.yr <- get_AET_yr(sc, runData, simTime) @@ -3387,7 +3466,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #27 - if(any(simulation_timescales=="yearly") & aon$yearlyWaterBalanceFluxes) { + if(aon$yearlyWaterBalanceFluxes) { if(print.debug) print("Aggregation of yearlyWaterBalanceFluxes") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) if(!exists("Esurface.yr")) Esurface.yr <- get_Esurface_yr(sc, 
runData, simTime) @@ -3419,22 +3498,18 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer hydred.topTobottom <- temp1[simTime$index.useyr,1+topL] } - if( any(simulation_timescales=="daily")) { - temp1 <- 10 * slot(slot(runData[[sc]],sw_swcbulk),"Day") - if(simTime$index.usedy[1] == 1){ #simstartyr == startyr, then (simTime$index.usedy-1) misses first value - index.usedyPlusOne <- simTime$index.usedy[-length(simTime$index.usedy)]+1 - } else { - index.usedyPlusOne <- simTime$index.usedy - } - if(length(topL) > 1) { - swcdyflux <- apply(temp1[index.usedyPlusOne,2+ld], 1, sum) - apply(temp1[index.usedyPlusOne-1,2+ld], 1, sum) - } else { - swcdyflux <- temp1[index.usedyPlusOne,2+ld] - temp1[index.usedyPlusOne-1,2+ld] - } - swc.flux <- tapply(swcdyflux, temp1[index.usedyPlusOne, 1], sum) - } else { - swc.flux <- rep(NA, simTime$no.useyr) - } + temp1 <- 10 * slot(slot(runData[[sc]],sw_swcbulk),"Day") + if(simTime$index.usedy[1] == 1){ #simstartyr == startyr, then (simTime$index.usedy-1) misses first value + index.usedyPlusOne <- simTime$index.usedy[-length(simTime$index.usedy)]+1 + } else { + index.usedyPlusOne <- simTime$index.usedy + } + if(length(topL) > 1) { + swcdyflux <- apply(temp1[index.usedyPlusOne,2+ld], 1, sum) - apply(temp1[index.usedyPlusOne-1,2+ld], 1, sum) + } else { + swcdyflux <- temp1[index.usedyPlusOne,2+ld] - temp1[index.usedyPlusOne-1,2+ld] + } + swc.flux <- tapply(swcdyflux, temp1[index.usedyPlusOne, 1], sum) # fluxes fluxtemp <- cbind(prcp.yr$rain, rain_toSoil, prcp.yr$snowfall, prcp.yr$snowmelt, prcp.yr$snowloss, intercept.yr$sum, intercept.yr$veg, intercept.yr$litter, Esurface.yr$veg, Esurface.yr$litter, inf.yr$inf, runoff.yr$val, evap.tot, evap_soil.tot, Esoil.yr$top, Esoil.yr$bottom, transp.tot, transp.yr$top, transp.yr$bottom, hydred.topTobottom, drain.topTobottom, deepDrain.yr$val, swc.flux) @@ -3457,70 +3532,106 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #27.2 - 
if(any(simulation_timescales=="daily") & aon$dailySoilWaterPulseVsStorage){ + if(aon$dailySoilWaterPulseVsStorage){ if(print.debug) print("Aggregation of dailySoilWaterPulseVsStorage") if(!exists("inf.dy")) inf.dy <- get_Inf_dy(sc, runData, simTime) if(!exists("transp.dy.all")) transp.dy.all <- get_Response_aggL(sc, sw_transp, tscale = "dyAll", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("Esoil.dy.all")) Esoil.dy.all <- get_Response_aggL(sc, sw_evsoil, tscale = "dyAll", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(sc, runData, simTime) - percolation <- 10 * slot(slot(runData[[sc]],sw_percolation), "Day")[simTime$index.usedy, 2 + head(ld, n=-1)] - hydred <- 10 * slot(slot(runData[[sc]],sw_hd), "Day")[simTime$index.usedy, 2 + ld] + percolation <- if (d > 1) { + 10 * slot(slot(runData[[sc]],sw_percolation), "Day")[simTime$index.usedy, 2 + ld[-d]] + } else { + rep(0, simTime$no.usedy) + } + hydred <- 10 * slot(slot(runData[[sc]],sw_hd), "Day")[simTime$index.usedy, 2 + ld] # Water balance - outputs_by_layer <- inputs_by_layer <- matrix(0, nrow = length(simTime$index.usedy), ncol = length(ld)) + outputs_by_layer <- inputs_by_layer <- matrix(0, nrow = simTime$no.usedy, ncol = d, + dimnames = list(NULL, paste0("total_Lyr_", ld))) # Inputs: infiltration + received hydraulic redistribution + received percolation inputs_by_layer[, 1] <- inputs_by_layer[, 1] + inf.dy$inf inputs_by_layer <- inputs_by_layer + ifelse(hydred > 0, hydred, 0) - inputs_by_layer[, -1] <- inputs_by_layer[, -1] + ifelse(percolation > 0, percolation, 0) + if (d > 1) { + inputs_by_layer[, -1] <- inputs_by_layer[, -1] + ifelse(percolation > 0, percolation, 0) + } # Outputs: soil evaporation + transpiration + deep drainage + hydraulic redistribution donor + percolation donor - itemp <- 1:(ncol(Esoil.dy.all$val) - 2) - 
outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + Esoil.dy.all$val[simTime$index.usedy, -(1:2)] - itemp <- grepl("transp_total", colnames(transp.dy.all$val)) - outputs_by_layer[, 1:sum(itemp)] <- outputs_by_layer[, 1:sum(itemp)] + transp.dy.all$val[simTime$index.usedy, itemp] - itemp <- ncol(outputs_by_layer) - outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + deepDrain.dy$val - outputs_by_layer[, -itemp] <- outputs_by_layer[, -itemp] + ifelse(percolation < 0, -percolation, 0) - outputs_by_layer <- outputs_by_layer + ifelse(hydred < 0, -hydred, 0) + if (ncol(Esoil.dy.all$val) > 2) { + itemp <- seq_len(ncol(Esoil.dy.all$val) - 2) + outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + + Esoil.dy.all$val[simTime$index.usedy, -(1:2)] + } + itemp <- grepl("transp_total", colnames(transp.dy.all$val)) + if (any(itemp)) { + itemp <- seq_len(sum(itemp)) + outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + + transp.dy.all$val[simTime$index.usedy, itemp] + } + itemp <- ncol(outputs_by_layer) + outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + deepDrain.dy$val + if (itemp > 1) { + outputs_by_layer[, -itemp] <- outputs_by_layer[, -itemp] + + ifelse(percolation < 0, -percolation, 0) + } + outputs_by_layer <- outputs_by_layer + ifelse(hydred < 0, -hydred, 0) # balance balance <- inputs_by_layer - outputs_by_layer extraction <- balance < 0 storage_use <- by(cbind(extraction, outputs_by_layer), - INDICES = simTime2$year_ForEachUsedDay_NSadj, - FUN = function(x) { - res1 <- apply(x[, ld], 2, rle) - res2 <- apply(x[, max(ld) + ld], 2, function(y) list(out = y)) - modifyList(res1, res2) - }) - - # mean duration among extracting spells for each layer and each year - extraction_duration_days <- vapply(storage_use, function(ydat) - vapply(ydat, function(dat) mean(dat$lengths[as.logical(dat$values)]), - FUN.VALUE = NA_real_), - FUN.VALUE = rep(NA_real_, dim(outputs_by_layer)[2])) + INDICES = simTime2$year_ForEachUsedDay_NSadj, + FUN = function(x) { + res1 <- 
apply(x[, ld, drop = FALSE], 2, rle) + res2 <- apply(x[, soilLayers_N + ld, drop = FALSE], 2, function(y) list(out = y)) + modifyList(res1, res2) + }, simplify = FALSE) + + # median duration among extracting spells for each layer and each year + extraction_duration_days <- vapply(storage_use, function(x) + vapply(x, function(dat) { + if (is.null(dat$out) || is.null(dat$values)) { + NA + } else { + temp <- as.logical(dat$values) + if (any(temp)) mean(dat$lengths[as.logical(dat$values)]) else NA + } + }, FUN.VALUE = NA_real_), + FUN.VALUE = rep(NA_real_, soilLayers_N)) + + if (!is.matrix(extraction_duration_days)) { + extraction_duration_days <- matrix(extraction_duration_days, nrow = soilLayers_N, ncol = simTime$no.useyr) + } # median annual sum of all extracted water during extracting spells for each layer and each year extraction_summed_mm <- vapply(storage_use, function(x) vapply(x, function(dat) { - dat$values <- as.logical(dat$values) - temp <- dat - if (any(dat$values)) - temp$values[dat$values] <- seq_len(sum(dat$values)) # give unique ID to each extraction spell - has_zero <- any(!dat$values) - if (has_zero) - temp$values[!dat$values] <- 0 # we are not interested in positive spells - storage_ids <- inverse.rle(temp) - x <- tapply(dat$out, storage_ids, sum) # sum up extracted water for each extraction spell - if (has_zero && length(x) > 0) - x <- x[-1] # remove first element because this represents the positive spells (id = 0) - sum(x) - }, FUN.VALUE = NA_real_), FUN.VALUE = rep(NA_real_, dim(outputs_by_layer)[2])) + if (is.null(dat$out) || is.null(dat$values)) { + NA + } else { + dat$values <- as.logical(dat$values) + temp <- dat + if (any(dat$values)) + temp$values[dat$values] <- seq_len(sum(dat$values)) # give unique ID to each extraction spell + has_zero <- any(!dat$values) + if (has_zero) + temp$values[!dat$values] <- 0 # we are not interested in positive spells + storage_ids <- inverse.rle(temp) + x <- tapply(dat$out, INDEX=storage_ids, sum) # sum up 
extracted water for each extraction spell + if (has_zero && length(x) > 0) + x <- x[-1] # remove first element because this represents the positive spells (id = 0) + + sum(x) + } + }, FUN.VALUE = NA_real_), FUN.VALUE = rep(NA_real_, soilLayers_N)) + + if (!is.matrix(extraction_summed_mm)) { + extraction_summed_mm <- matrix(extraction_summed_mm, nrow = soilLayers_N, ncol = simTime$no.useyr) + } # aggregate across years for each soil layer - resAgg[nv:(nv+max(ld)-1), ] <- t(round(apply(extraction_duration_days, 1, agg_fun, na.rm = TRUE), 1)) + resAgg[nv:(nv+soilLayers_N-1), ] <- t(round(apply(extraction_duration_days, 1, agg_fun, na.rm = TRUE), 1)) nv <- nv+SoilLayer_MaxNo - resAgg[nv:(nv+max(ld)-1), ] <- t(round(apply(extraction_summed_mm, 1, agg_fun, na.rm = TRUE), 1)) + resAgg[nv:(nv+soilLayers_N-1), ] <- t(round(apply(extraction_summed_mm, 1, agg_fun, na.rm = TRUE), 1)) nv <- nv+SoilLayer_MaxNo rm(percolation, hydred, inputs_by_layer, outputs_by_layer, balance, extraction, storage_use, extraction_duration_days, extraction_summed_mm) @@ -3529,7 +3640,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Daily extreme values #28 - if(any(simulation_timescales=="daily") & aon$dailyTranspirationExtremes) {#mean and SD of DOY and value of minimum/maximum + if(aon$dailyTranspirationExtremes) {#mean and SD of DOY and value of minimum/maximum if(print.debug) print("Aggregation of dailyTranspirationExtremes") if(!exists("transp.dy")) transp.dy <- get_Response_aggL(sc, sw_transp, tscale = "dy", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -3548,7 +3659,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(extremes) } #29 - if(any(simulation_timescales=="daily") & aon$dailyTotalEvaporationExtremes) { + if(aon$dailyTotalEvaporationExtremes) { if(print.debug) print("Aggregation of dailyTotalEvaporationExtremes") if(!exists("Esoil.dy")) 
Esoil.dy <- get_Response_aggL(sc, sw_evsoil, tscale = "dy", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("Esurface.dy")) Esurface.dy <- get_Esurface_dy(sc, runData, simTime) @@ -3568,7 +3679,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(extremes) } #30 - if(any(simulation_timescales=="daily") & aon$dailyDrainageExtremes) { + if(aon$dailyDrainageExtremes) { if(print.debug) print("Aggregation of dailyDrainageExtremes") if(!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(sc, runData, simTime) @@ -3586,7 +3697,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(extremes) } #31 - if(any(simulation_timescales=="daily") & aon$dailyInfiltrationExtremes) { + if(aon$dailyInfiltrationExtremes) { if(print.debug) print("Aggregation of dailyInfiltrationExtremes") if(!exists("inf.dy")) inf.dy <- get_Inf_dy(sc, runData, simTime) @@ -3604,7 +3715,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(extremes) } #32 - if(any(simulation_timescales=="daily") & aon$dailyAETExtremes) { + if(aon$dailyAETExtremes) { if(print.debug) print("Aggregation of dailyAETExtremes") if(!exists("AET.dy")) AET.dy <- get_AET_dy(sc, runData, simTime) @@ -3622,7 +3733,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(extremes) } #33 - if(any(simulation_timescales=="daily") & aon$dailySWPextremes){ + if(aon$dailySWPextremes){ if(print.debug) print("Aggregation of dailySWPextremes") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) @@ -3646,7 +3757,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, 
i_sw_input_soillayer rm(extremes) } #34 - if(any(simulation_timescales=="daily") & aon$dailyRechargeExtremes){ + if(aon$dailyRechargeExtremes){ if(print.debug) print("Aggregation of dailyRechargeExtremes") if(!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(sc, sw_swcbulk, tscale = "dy", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -3676,22 +3787,21 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #---Aggregation: Ecological dryness - #35a regimes_done <- FALSE - if (any(simulation_timescales == "daily") && aon$dailyNRCS_SoilMoistureTemperatureRegimes) { + if (aon$dailyNRCS_SoilMoistureTemperatureRegimes) { if (print.debug) print("Aggregation of dailyNRCS_SoilMoistureTemperatureRegimes") #Based on references provided by Chambers, J. C., D. A. Pyke, J. D. Maestas, M. Pellant, C. S. Boyd, S. B. Campbell, S. Espinosa, D. W. Havlina, K. E. Mayer, and A. Wuenschel. 2014. Using Resistance and Resilience Concepts to Reduce Impacts of Invasive Annual Grasses and Altered Fire Regimes on the Sagebrush Ecosystem and Greater Sage-Grouse: A Strategic Multi-Scale Approach. Gen. Tech. Rep. RMRS-GTR-326. U.S. Department of Agriculture, Forest Service, Rocky Mountain Research Station, Fort Collins, CO. #Soil Survey Staff. 2014. Keys to soil taxonomy, 12th ed., USDA Natural Resources Conservation Service, Washington, DC. #Soil Survey Staff. 2010. Keys to soil taxonomy, 11th ed., USDA Natural Resources Conservation Service, Washington, DC. 
#Result containers - Tregime_names <- c("Hyperthermic", "Thermic", "Mesic", "Frigid", "Cryic", "Gelic") - Tregime <- rep(0, length(Tregime_names)) - names(Tregime) <- Tregime_names - Sregime_names <- c("Anyhydrous", "Aridic", "Udic", "Ustic", "Xeric") - Sregime <- rep(0, length(Sregime_names)) - names(Sregime) <- Sregime_names + temp <- c("Hyperthermic", "Thermic", "Mesic", "Frigid", "Cryic", "Gelic") + Tregime <- rep(0, length(temp)) + names(Tregime) <- temp + temp <- c("Anyhydrous", "Aridic", "Udic", "Ustic", "Xeric") + Sregime <- rep(0, length(temp)) + names(Sregime) <- temp MCS_depth <- Lanh_depth <- rep(NA, 2) Fifty_depth <- permafrost <- NA @@ -3700,344 +3810,351 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer "CSPartSummer", paste0("V", 6:25)) if (swSite_SoilTemperatureFlag(swRunScenariosData[[sc]])) { #we need soil temperature - if (!exists("soiltemp.yr.all")) soiltemp.yr.all <- get_Response_aggL(sc, sw_soiltemp, tscale = "yrAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("soiltemp.mo.all")) soiltemp.mo.all <- get_Response_aggL(sc, sw_soiltemp, tscale = "moAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if (!exists("soiltemp.dy.all")) soiltemp.dy.all <- get_Response_aggL(sc, sw_soiltemp, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) - if (!exists("prcp.yr")) prcp.yr <- 
get_PPT_yr(sc, runData, simTime) - if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(sc, runData, simTime) - - #Parameters - SWP_dry <- -1.5 #dry means SWP below -1.5 MPa (Soil Survey Staff 2014: p.29) - SWP_sat <- -0.033 #saturated means SWP above -0.033 MPa - impermeability <- 0.9 #impermeable layer - - #Required soil layers - soildat <- swSoils_Layers(swRunScenariosData[[sc]])[, c("depth_cm", "sand", "clay", "imperm")] - #50cm soil depth or impermeable layer (whichever is shallower; Soil Survey Staff 2014: p.31) - imp_depth <- which(soildat[, "imperm"] >= impermeability) - imp_depth <- min(imp_depth, max(soildat[, "depth_cm"])) #Interpret maximum soil depth as possible impermeable layer - Fifty_depth <- min(50, imp_depth) - - #Definition of MCS (Soil Survey Staff 2014: p.29): The moisture control section (MCS) of a soil: the depth to which a dry (tension of more than 1500 kPa, but not air-dry) soil will be moistened by 2.5 cm of water within 24 hours. The lower boundary is the depth to which a dry soil will be moistened by 7.5 cm of water within 48 hours. - sand_temp <- weighted.mean(sand, layers_width) - clay_temp <- weighted.mean(clay, layers_width) - #Practical depth definition of MCS - # - 10 to 30 cm below the soil surface if the particle-size class of the soil is fine-loamy, coarse-silty, fine-silty, or clayey - # - 20 to 60 cm if the particle-size class is coarse-loamy - # - 30 to 90 cm if the particle-size class is sandy. - MCS_depth <- if(clay_temp >= 0.18) { c(10, 30) - } else if(sand_temp < 0.15){ c(10, 30) - } else if(sand_temp >= 0.50){ c(30, 90) - } else c(20, 60) - #If 7.5 cm of water moistens the soil to a densic, lithic, paralithic, or petroferric contact or to a petrocalcic or petrogypsic horizon or a duripan, the contact or the upper boundary of the cemented horizon constitutes the lower boundary of the soil moisture control section. 
If a soil is moistened to one of these contacts or horizons by 2.5 cm of water, the soil moisture control section is the boundary of the contact itself. The control section of such a soil is considered moist if the contact or upper boundary of the cemented horizon has a thin film of water. If that upper boundary is dry, the control section is considered dry. - - MCS_depth <- adjustLayer_byImp(depths = MCS_depth, imp_depth = imp_depth, sdepths = soildat[, "depth_cm"]) - - #Soil layer 10-70 cm used for anhydrous layer definition; adjusted for impermeable layer - Lanh_depth <- adjustLayer_byImp(depths = c(10, 70), imp_depth = imp_depth, sdepths = soildat[, "depth_cm"]) - - #Permafrost (Soil Survey Staff 2014: p.28) is defined as a thermal condition in which a material (including soil material) remains below 0 C for 2 or more years in succession - permafrost <- any(apply(soiltemp.yr.all$val[simTime$index.useyr, -1, drop = FALSE], 2, function(x) { - temp <- rle(x < 0) - any(temp$values) && any(temp$lengths[temp$values] >= 2) - })) - - #Set soil depths and intervals accounting for shallow soil profiles: Soil Survey Staff 2014: p.31) - soiltemp_nrsc <- list() - soiltemp_nrsc[["yr"]] <- list(data = soiltemp.yr.all$val, nheader = 1) - soiltemp_nrsc[["mo"]] <- list(data = soiltemp.mo.all$val, nheader = 2) - soiltemp_nrsc[["dy"]] <- list(data = soiltemp.dy.all$val, nheader = 2) - vwc_dy_nrsc <- vwcmatric.dy.all - - ##Calculate soil temperature at necessary depths using a weighted mean - i_depth50 <- findInterval(Fifty_depth, soildat[, "depth_cm"]) - calc50 <- !(Fifty_depth == soildat[i_depth50, "depth_cm"]) - if (calc50) { - weights50 <- abs(Fifty_depth - soildat[i_depth50 + c(1, 0), "depth_cm"]) - soildat <- t(add_layer_to_soil(t(soildat), i_depth50, weights50)) - i_depth50 <- findInterval(Fifty_depth, soildat[, "depth_cm"]) - soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) - list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_depth50, weights50), - nheader = 
st[["nheader"]])) - vwc_dy_nrsc$val <- add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_depth50, weights50) - } + if (!anyNA(soiltemp.dy.all$val) && all(soiltemp.dy.all$val[, -(1:2)] < 100)) { + # 100 C as upper realistic limit from Garratt, J.R. (1992). Extreme maximum land surface temperatures. Journal of Applied Meteorology, 31, 1096-1105. + if (!exists("soiltemp.yr.all")) soiltemp.yr.all <- get_Response_aggL(sc, sw_soiltemp, tscale = "yrAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("soiltemp.mo.all")) soiltemp.mo.all <- get_Response_aggL(sc, sw_soiltemp, tscale = "moAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(sc, runData, simTime) + + #Parameters + SWP_dry <- -1.5 #dry means SWP below -1.5 MPa (Soil Survey Staff 2014: p.29) + SWP_sat <- -0.033 #saturated means SWP above -0.033 MPa + impermeability <- 0.9 #impermeable layer + + #Required soil layers + soildat <- swSoils_Layers(swRunScenariosData[[sc]])[, c("depth_cm", "sand", "clay", "imperm"), drop = FALSE] + #50cm soil depth or impermeable layer (whichever is shallower; Soil Survey Staff 2014: p.31) + imp_depth <- which(soildat[, "imperm"] >= impermeability) + imp_depth <- min(imp_depth, max(soildat[, "depth_cm"])) #Interpret maximum soil depth as possible impermeable layer + Fifty_depth <- min(50, imp_depth) + + #Definition of MCS (Soil Survey Staff 2014: 
p.29): The moisture control section (MCS) of a soil: the depth to which a dry (tension of more than 1500 kPa, but not air-dry) soil will be moistened by 2.5 cm of water within 24 hours. The lower boundary is the depth to which a dry soil will be moistened by 7.5 cm of water within 48 hours. + sand_temp <- weighted.mean(sand, layers_width) + clay_temp <- weighted.mean(clay, layers_width) + #Practical depth definition of MCS + # - 10 to 30 cm below the soil surface if the particle-size class of the soil is fine-loamy, coarse-silty, fine-silty, or clayey + # - 20 to 60 cm if the particle-size class is coarse-loamy + # - 30 to 90 cm if the particle-size class is sandy. + MCS_depth <- if(clay_temp >= 0.18) { c(10, 30) + } else if(sand_temp < 0.15){ c(10, 30) + } else if(sand_temp >= 0.50){ c(30, 90) + } else c(20, 60) + #If 7.5 cm of water moistens the soil to a densic, lithic, paralithic, or petroferric contact or to a petrocalcic or petrogypsic horizon or a duripan, the contact or the upper boundary of the cemented horizon constitutes the lower boundary of the soil moisture control section. If a soil is moistened to one of these contacts or horizons by 2.5 cm of water, the soil moisture control section is the boundary of the contact itself. The control section of such a soil is considered moist if the contact or upper boundary of the cemented horizon has a thin film of water. If that upper boundary is dry, the control section is considered dry. 
+ + MCS_depth <- adjustLayer_byImp(depths = MCS_depth, imp_depth = imp_depth, sdepths = soildat[, "depth_cm"]) + + #Soil layer 10-70 cm used for anhydrous layer definition; adjusted for impermeable layer + Lanh_depth <- adjustLayer_byImp(depths = c(10, 70), imp_depth = imp_depth, sdepths = soildat[, "depth_cm"]) + + #Permafrost (Soil Survey Staff 2014: p.28) is defined as a thermal condition in which a material (including soil material) remains below 0 C for 2 or more years in succession + permafrost <- any(apply(soiltemp.yr.all$val[simTime$index.useyr, -1, drop = FALSE], 2, function(x) { + temp <- rle(x < 0) + any(temp$values) && any(temp$lengths[temp$values] >= 2) + })) + + #Set soil depths and intervals accounting for shallow soil profiles: Soil Survey Staff 2014: p.31) + soiltemp_nrsc <- list( + yr = list(data = soiltemp.yr.all$val, nheader = 1), + mo = list(data = soiltemp.mo.all$val, nheader = 2), + dy = list(data = soiltemp.dy.all$val, nheader = 2) + ) + vwc_dy_nrsc <- vwcmatric.dy.all - i_MCS <- findInterval(MCS_depth, soildat[, "depth_cm"]) - calcMCS <- !(MCS_depth == soildat[i_MCS, "depth_cm"]) - if (any(calcMCS)) for (k in which(calcMCS)) { - weightsMCS <- abs(MCS_depth[k] - soildat[i_MCS[k] + c(1, 0), "depth_cm"]) - soildat <- t(add_layer_to_soil(t(soildat), i_MCS[k], weightsMCS)) - i_MCS <- findInterval(MCS_depth, soildat[, "depth_cm"]) + ##Calculate soil temperature at necessary depths using a weighted mean + i_depth50 <- findInterval(Fifty_depth, soildat[, "depth_cm"]) + calc50 <- !(Fifty_depth == soildat[i_depth50, "depth_cm"]) + if (calc50) { + weights50 <- abs(Fifty_depth - soildat[i_depth50 + c(1, 0), "depth_cm"]) + soildat <- t(add_layer_to_soil(t(soildat), i_depth50, weights50)) + i_depth50 <- findInterval(Fifty_depth, soildat[, "depth_cm"]) + + soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) + list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_depth50, weights50), + nheader = st[["nheader"]])) + vwc_dy_nrsc$val <- 
add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_depth50, weights50) + rm(weights50) + } - soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) - list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_MCS[k], weightsMCS), - nheader = st[["nheader"]])) - vwc_dy_nrsc$val <- add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_MCS[k], weightsMCS) - } + i_MCS <- findInterval(MCS_depth, soildat[, "depth_cm"]) + calcMCS <- !(MCS_depth == soildat[i_MCS, "depth_cm"]) + if (any(calcMCS)) for (k in which(calcMCS)) { + weightsMCS <- abs(MCS_depth[k] - soildat[i_MCS[k] + c(1, 0), "depth_cm"]) + soildat <- t(add_layer_to_soil(t(soildat), i_MCS[k], weightsMCS)) + i_MCS <- findInterval(MCS_depth, soildat[, "depth_cm"]) + + soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) + list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_MCS[k], weightsMCS), + nheader = st[["nheader"]])) + vwc_dy_nrsc$val <- add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_MCS[k], weightsMCS) + rm(weightsMCS) + } - i_Lanh <- findInterval(Lanh_depth, soildat[, "depth_cm"]) - calcLanh <- !(Lanh_depth == soildat[i_Lanh, "depth_cm"]) - if (any(calcLanh)) for (k in which(calcLanh)) { - weightsLanh <- abs(Lanh_depth[k] - soildat[i_Lanh[k] + c(1, 0), "depth_cm"]) - soildat <- t(add_layer_to_soil(t(soildat), i_Lanh[k], weightsLanh)) i_Lanh <- findInterval(Lanh_depth, soildat[, "depth_cm"]) + calcLanh <- !(Lanh_depth == soildat[i_Lanh, "depth_cm"]) + if (any(calcLanh)) for (k in which(calcLanh)) { + weightsLanh <- abs(Lanh_depth[k] - soildat[i_Lanh[k] + c(1, 0), "depth_cm"]) + soildat <- t(add_layer_to_soil(t(soildat), i_Lanh[k], weightsLanh)) + i_Lanh <- findInterval(Lanh_depth, soildat[, "depth_cm"]) + + soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) + list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_Lanh[k], weightsLanh), + nheader = st[["nheader"]])) + vwc_dy_nrsc$val <- add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_Lanh[k], weightsLanh) + rm(weightsLanh) + } - soiltemp_nrsc <- lapply(soiltemp_nrsc, 
function(st) - list(data = add_layer_to_soil(st[["data"]], st[["nheader"]] + i_Lanh[k], weightsLanh), - nheader = st[["nheader"]])) - vwc_dy_nrsc$val <- add_layer_to_soil(vwc_dy_nrsc$val, 2 + i_Lanh[k], weightsLanh) - } - - if (calc50 || any(calcMCS) || any(calcLanh)) { - if (!be.quiet) print(paste0(i_label, " interpolated soil layers for NRCS soil regimes", - " because of insufficient soil layers: required would be {", - paste(sort(unique(c(Fifty_depth, MCS_depth, Lanh_depth))), collapse = ", "), - "} and available are {", - paste(layers_depth, collapse = ", "), "}")) - - swp_dy_nrsc <- get_SWPmatric_aggL(vwc_dy_nrsc, texture = texture, - sand = soildat[, "sand"], clay = soildat[, "clay"]) + if (calc50 || any(calcMCS) || any(calcLanh)) { + if (!be.quiet) print(paste0(i_label, " interpolated soil layers for NRCS soil regimes", + " because of insufficient soil layers: required would be {", + paste(sort(unique(c(Fifty_depth, MCS_depth, Lanh_depth))), collapse = ", "), + "} and available are {", + paste(layers_depth, collapse = ", "), "}")) - } else { - swp_dy_nrsc <- swpmatric.dy.all - } + swp_dy_nrsc <- get_SWPmatric_aggL(vwc_dy_nrsc, texture = texture, + sand = soildat[, "sand"], clay = soildat[, "clay"]) - soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) st[["data"]]) - swp_dy_nrsc <- swp_dy_nrsc$val[simTime$index.usedy, -(1:2)] - - #MCS (Soil Survey Staff 2014: p.29) - #What soil layer info used for MCS - i_MCS <- identify_soillayers(MCS_depth, soildat[, "depth_cm"]) - #Repeat for Anhydrous soil layer moisture delineation - i_Lanh <- identify_soillayers(Lanh_depth, soildat[, "depth_cm"]) - - #mean soil temperature in Lahn depths (10 - 70 cm) - temp_annual[, "MATLanh"] <- apply(soiltemp_nrsc[["yr"]][simTime$index.useyr, 1 + i_Lanh, drop = FALSE], 1, - weighted.mean, w = soildat[i_Lanh, "depth_cm"]) - - #---Calculate variables - #Water year starting Oct 1 - wateryears <- simTime2$year_ForEachUsedDay_NSadj + ifelse(simTime2$doy_ForEachUsedDay_NSadj > 273, 1, 0) # 
1. water-year: N-hemisphere: October 1st = 1 day of water year; S-hemisphere: April 1st = 1 day of water year - wyears <- (temp <- unique(wateryears))[-length(temp)]#eliminate last year - - #mean soil temperatures at 50cm depth - temp_annual[, "MAT50"] <- soiltemp_nrsc[["yr"]][simTime$index.useyr, 1 + i_depth50] - temp <- soiltemp_nrsc[["mo"]][simTime$index.usemo, 2 + i_depth50][simTime2$month_ForEachUsedMonth_NSadj %in% 6:8] - temp_annual[, "T50jja"] <- apply(matrix(temp, ncol = simTime$no.useyr), 2, mean) - temp <- soiltemp_nrsc[["mo"]][simTime$index.usemo, 2 + i_depth50][simTime2$month_ForEachUsedMonth_NSadj %in% c(12, 1:2)] - temp_annual[, "T50djf"] <- apply(matrix(temp, ncol = simTime$no.useyr), 2, mean) - T50 <- soiltemp_nrsc[["dy"]][simTime$index.usedy, 2 + i_depth50] - #Moist and dry at 50cm depth for MCS and Lahn calcs - - #CSPartSummer: Is the soil saturated with water during some part of the summer June1 (=regular doy 244) - Aug31 (=regular doy 335) - isummer <- simTime2$doy_ForEachUsedDay_NSadj >= 244 & simTime2$doy_ForEachUsedDay_NSadj <= 335 - temp_annual[, "CSPartSummer"] <- vapply(wyears, function(yr) { - temp <- apply(swp_dy_nrsc[wateryears == yr & isummer, ], 1, - function(x) all(x >= SWP_sat)) - rtemp <- rle(temp) - if(any(rtemp$values)) max(rtemp$lengths[rtemp$values]) else 0 - }, FUN.VALUE = NA_real_) - - am <- colMeans(temp_annual[, c("MAT50", "T50jja", "T50djf", "CSPartSummer")]) - - #---Soil temperature regime: based on Chambers et al. 
2014: Appendix 3 and on Soil Survey Staff 2010: p.28/Soil Survey Staff 2014: p.31 - #we ignore distinction between iso- and not iso- - if (am["MAT50"] >= 22) { - Tregime["Hyperthermic"] <- 1L - } else if (am["MAT50"] >= 15){ - Tregime["Thermic"] <- 1L - } else if (am["MAT50"] >= 8){ - Tregime["Mesic"] <- 1L - } else if (am["MAT50"] < 8 && am["MAT50"] > 0) { - if (am["CSPartSummer"] > 0) { - # ignoring organic soils, Saturated with water - if (am["T50jja"] > 0 && am["T50jja"] < 13) { - # ignoring O-horizon - Tregime["Cryic"] <- 1L - } else { - Tregime["Frigid"] <- 1L - } } else { - # ignoring O-horizon and histic epipedon, Not saturated with water - if (am["T50jja"] > 0 && am["T50jja"] < 15) { - Tregime["Cryic"] <- 1L - } else { - Tregime["Frigid"] <- 1L - } + swp_dy_nrsc <- swpmatric.dy.all } - } else if (am["MAT50"] <= 0 || permafrost) { - # limit should be 1 C for Gelisols - Tregime["Gelic"] <- 1L - } - - #Normal years for soil moisture regimes (Soil Survey Staff 2014: p.29) - #Should have a time period of 30 years to determine normal years - MAP <- c(mean(prcp.yr$ppt), sd(prcp.yr$ppt)) - normal1 <- (prcp.yr$ppt >= MAP[1] - MAP[2]) & (prcp.yr$ppt <= MAP[1] + MAP[2]) - MMP <- tapply(prcp.mo$ppt, - simTime2$month_ForEachUsedMonth_NSadj, - function(x) c(mean(x), sd(x))) - MMP <- matrix(unlist(MMP), nrow = 2, ncol = 12) - normal2 <- tapply(prcp.mo$ppt, simTime2$yearno_ForEachUsedMonth_NSadj, - function(x) sum((x >= MMP[1, ] - MMP[2, ]) & (x <= MMP[1, ] + MMP[2, ])) >= 8) - # Normal years = - # - Annual precipitation that is plus or minus one standard precipitation - # - and Mean monthly precipitation that is plus or minus one standard deviation of the long-term monthly precipitation for 8 of the 12 months - wyears_normal <- wyears[normal1 & normal2] - wyears_index <- findInterval(wyears_normal, wyears) - wdays_index <- wateryears %in% wyears_normal - days_per_wyear <- as.integer(table(wateryears[wdays_index], dnn = FALSE)) - - if (length(wyears_normal) > 2) { - 
#Structures used Lanh delinieation - #Days are moists in half of the Lanh soil depth (and not soil layers!) - n_Lanh <- length(i_Lanh) - width_Lanh <- diff(c(0, soildat[, "depth_cm"]))[i_Lanh] # stopifnot(sum(width_Lanh) == Lanh_depth[2] - Lanh_depth[1]) - temp <- swp_dy_nrsc[wdays_index, i_Lanh, drop = FALSE] > SWP_dry - temp <- temp * matrix(width_Lanh, nrow = sum(wdays_index), ncol = length(i_Lanh), byrow = TRUE) - Lanh_Dry_Half <- .rowSums(temp, m = sum(wdays_index), n = n_Lanh) <= sum(width_Lanh) / 2 - - #Conditions for Anhydrous soil delineation - LanhConditionalDF <- data.frame( - Years = rep(wyears_normal, days_per_wyear), - T50_at0C = T50[wdays_index] > 0, # days where T @ 50 is > 0 C - Lanh_Dry_Half = Lanh_Dry_Half, - MAT50 = rep(temp_annual[wyears_index, "MAT50"], days_per_wyear), - MATLanh = rep(temp_annual[wyears_index, "MATLanh"], days_per_wyear) - ) - #Mean Annual soil temperature is less than or equal to 0C - LanhConditionalDF$COND1 <- LanhConditionalDF$MAT50 <= 0 - #Soil temperature in the Lahn Depth is never greater than 5 - LanhConditionalDF$COND2 <- LanhConditionalDF$MATLanh <= 5 - #In the Lahn Depth, 1/2 of soil dry > 1/2 CUMULATIVE days when Mean Annual ST > 0C - LanhConditionalDF$COND3_Test <- LanhConditionalDF$Lanh_Dry_Half == LanhConditionalDF$T50_at0C #TRUE = where are both these conditions met - temp <- with(LanhConditionalDF, tapply(COND3_Test, Years, sum)) - LanhConditionalDF$HalfDryDaysCumAbove0C <- rep(temp, days_per_wyear) - temp <- with(LanhConditionalDF, tapply(T50_at0C, Years, sum)) - LanhConditionalDF$SoilAbove0C <- rep(temp, days_per_wyear) - LanhConditionalDF$COND3 <- LanhConditionalDF$HalfDryDaysCumAbove0C > .5 * LanhConditionalDF$SoilAbove0C #TRUE = Half of soil layers are dry greater than half the days where MAST >0c - LanhConditionalDF3 <- apply(aggregate(LanhConditionalDF[, c('COND1', 'COND2', 'COND3')], - by = list(LanhConditionalDF$Years), - function(x) sum(x) >= sum(!x)), - 2, function(x) sum(x) >= sum(!x)) - - 
#Structures used for MCS delineation - ConditionalDF <- data.frame( - Years = rep(wyears_normal, days_per_wyear), - DOY = simTime2$doy_ForEachUsedDay_NSadj[wdays_index], - MAT50 = rep(temp_annual[wyears_index, "MAT50"], days_per_wyear), - T50_at5C = T50[wdays_index] > 5, # days where T @ 50cm exceeds 5C - T50_at8C = T50[wdays_index] > 8, # days where T @ 50cm exceeds 8C - MCS_Moist_All = apply(swp_dy_nrsc[wdays_index, i_MCS, drop = FALSE] > SWP_dry, 1, all), - MCS_Dry_All = apply(swp_dy_nrsc[wdays_index, i_MCS, drop = FALSE] < SWP_dry, 1, all), - T50jja = rep(temp_annual[wyears_index, "T50jja"], days_per_wyear), - T50djf = rep(temp_annual[wyears_index, "T50djf"], days_per_wyear) - ) - #COND1 - Dry in ALL parts for more than half of the CUMULATIVE days per year when the soil temperature at a depth of 50cm is above 5C - ConditionalDF$COND1_Test <- ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at5C #TRUE = where are both these conditions met - temp <- with(ConditionalDF, tapply(COND1_Test, Years, sum)) - ConditionalDF$DryDaysCumAbove5C <- rep(temp, days_per_wyear) - temp <- with(ConditionalDF, tapply(T50_at5C, Years, sum)) - ConditionalDF$SoilAbove5C <- rep(temp, days_per_wyear) - ConditionalDF$COND1 <- ConditionalDF$DryDaysCumAbove5C > .5 * ConditionalDF$SoilAbove5C #TRUE =Soils are dry greater than 1/2 cumulative days/year - - #COND1.1 - Moist in SOME or ALL parts for more than half of the CUMMULATIVE days per year when the soil temperature at a depth of 50cm is above 5 - #!MCS_Dry_All = MCS_ Moist Any; !MCS_Moist_All = MCS_Dry Any - #This Test is kind of redundant basically if COND1 is TRUE than - ConditionalDF$COND1_1_Test <- !ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at5C #TRUE = where are both these conditions met - temp <- with(ConditionalDF, tapply(COND1_1_Test, Years, sum)) - ConditionalDF$AnyMoistDaysCumAbove5C <- rep(temp, days_per_wyear) - ConditionalDF$COND1_1 <- ConditionalDF$AnyMoistDaysCumAbove5C > .5 * ConditionalDF$SoilAbove5C - #Cond2 - Moist in 
SOME or all parts for less than 90 CONSECUTIVE days when the the soil temperature at a depth of 50cm is above 8C - ConditionalDF$COND2_Test <- !ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at8C #TRUE = where are both these conditions met - temp <- with(ConditionalDF, tapply(COND2_Test, Years, max.duration)) # Maximum consecutive days - ConditionalDF$COND2 <- rep(temp < 90, days_per_wyear) # TRUE = moist less than 90 consecutive days during >8 C soils, FALSE = moist more than 90 consecutive days - - #COND3 - MCS is Not dry in ANY part as long as 90 CUMULATIVE days - Can't be dry longer than 90 cum days - temp <- with(ConditionalDF, tapply(!MCS_Moist_All, Years, sum)) #Number of days where any soils are dry - ConditionalDF$DryDaysCumAny <- rep(temp, days_per_wyear) - ConditionalDF$COND3 <- ConditionalDF$DryDaysCumAny < 90 #TRUE = Not Dry for as long 90 cumlative days,FALSE = Dry as long as as 90 Cumlative days - - #COND4 - The means annual soil temperature at 50cm is < or > 22C - ConditionalDF$COND4 <- ConditionalDF$MAT50 > 22 #TRUE - Greater than 22, False - Less than 22 - - #COND5 - The absolute difference between the temperature in winter @ 50cm and the temperature in summer @ 50cm is > or < 6 - ConditionalDF$COND5 <- abs(ConditionalDF$T50djf - ConditionalDF$T50jja) > 6 #TRUE - Greater than 6, FALSE - Less than 6 - - #COND6 - Dry in ALL parts LESS than 45 CONSECUTIVE days in the 4 months following the summer solstice - temp <- with(ConditionalDF[ConditionalDF$DOY %in% c(172:293),], tapply(MCS_Dry_All, Years, max.duration)) #Consecutive days of dry soil after summer solsitice - ConditionalDF$DryDaysConsecSummer <- rep(temp, days_per_wyear) - ConditionalDF$COND6 <- ConditionalDF$DryDaysConsecSummer < 45 # TRUE = dry less than 45 consecutive days - - #COND7 - MCS is MOIST in SOME parts for more than 180 CUMULATIVE days - temp <- with(ConditionalDF, tapply(!MCS_Dry_All, Years, function(x) sum(x)))#Number of days where any soils are moist - 
ConditionalDF$MoistDaysCumAny <- rep(temp, days_per_wyear) - ConditionalDF$COND7 <- ConditionalDF$MoistDaysCumAny > 180 #TRUE = Not Dry or Moist for as long 180 cumlative days - - #Cond8 - MCS is MOIST in SOME parts for more than 90 CONSECUTIVE days - temp <- with(ConditionalDF, tapply(!MCS_Dry_All,Years, max.duration)) #Consecutive days of Moist soil - ConditionalDF$MoistDaysConsecAny <- rep(temp, days_per_wyear) - ConditionalDF$COND8 <- ConditionalDF$MoistDaysConsecAny > 90 # TRUE = Moist more than 90 Consecutive Days - - #COND9 - Moist in ALL parts MORE than 45 CONSECUTIVE days in the 4 months following the winter solstice - temp <- with(ConditionalDF[ConditionalDF$DOY %in% c(355:365, 1:111), ], tapply(MCS_Moist_All, Years, max.duration))#Consecutive days of moist soil after winter solsitice - ConditionalDF$MoistDaysConsecWinter <- rep(temp, days_per_wyear) - ConditionalDF$COND9 <- ConditionalDF$MoistDaysConsecWinter > 45 # TRUE = moist more than 45 consecutive days - - ConditionalDF3 <- apply(aggregate(ConditionalDF[, c('COND1','COND1_1','COND2','COND3','COND4','COND5','COND6','COND7','COND8','COND9')], - by=list(Year=ConditionalDF$Years), - function(x) sum(x) > sum(!x)), - 2, function(x) sum(x) > sum(!x)) - - #---Soil moisture regime: based on Chambers et al. 2014: Appendix 3 and on Soil Survey Staff 2010: p.26-28/Soil Survey Staff 2014: p.28-31 - #we ignore 'Aquic' - - #Anhydrous condition: Soil Survey Staff 2010: p.16/Soil Survey Staff 2014: p.18 - #we ignore test for 'ice-cemented permafrost' and 'rupture-resistance class' - if (LanhConditionalDF3['COND1'] && LanhConditionalDF3['COND2'] && LanhConditionalDF3['COND3']) - Sregime["Anhydrous"] <- 1L - - #Aridic soil moisture regime; The limits set for soil temperature exclude from these soil moisture regimes soils in the very cold and dry polar regions and in areas at high elevations. 
Such soils are considered to have anhydrous condition - if (ConditionalDF3['COND1'] && ConditionalDF3['COND2'] && !ConditionalDF3['COND3']) - Sregime["Aridic"] <- 1L - - #Udic soil moisture regime - #we ignore test for 'three- phase system' during T50 > 5 - if (ConditionalDF3['COND3']) { - if (!ConditionalDF3['COND4'] && ConditionalDF3['COND5']) { - if (ConditionalDF3['COND6']) - Sregime["Udic"] <- 1L + soiltemp_nrsc <- lapply(soiltemp_nrsc, function(st) st[["data"]]) + swp_dy_nrsc <- swp_dy_nrsc$val[simTime$index.usedy, -(1:2), drop = FALSE] + + #MCS (Soil Survey Staff 2014: p.29) + #What soil layer info used for MCS + i_MCS <- identify_soillayers(MCS_depth, soildat[, "depth_cm"]) + #Repeat for Anhydrous soil layer moisture delineation + i_Lanh <- identify_soillayers(Lanh_depth, soildat[, "depth_cm"]) + + #mean soil temperature in Lahn depths (10 - 70 cm) + temp_annual[, "MATLanh"] <- apply(soiltemp_nrsc[["yr"]][simTime$index.useyr, 1 + i_Lanh, drop = FALSE], 1, + weighted.mean, w = soildat[i_Lanh, "depth_cm"]) + + #---Calculate variables + #Water year starting Oct 1 + wateryears <- simTime2$year_ForEachUsedDay_NSadj + ifelse(simTime2$doy_ForEachUsedDay_NSadj > 273, 1, 0) # 1. 
water-year: N-hemisphere: October 1st = 1 day of water year; S-hemisphere: April 1st = 1 day of water year + wyears <- (temp <- unique(wateryears))[-length(temp)]#eliminate last year + + #mean soil temperatures at 50cm depth + temp_annual[, "MAT50"] <- soiltemp_nrsc[["yr"]][simTime$index.useyr, 1 + i_depth50] + temp <- soiltemp_nrsc[["mo"]][simTime$index.usemo, 2 + i_depth50][simTime2$month_ForEachUsedMonth_NSadj %in% 6:8] + temp_annual[, "T50jja"] <- apply(matrix(temp, ncol = simTime$no.useyr), 2, mean) + temp <- soiltemp_nrsc[["mo"]][simTime$index.usemo, 2 + i_depth50][simTime2$month_ForEachUsedMonth_NSadj %in% c(12, 1:2)] + temp_annual[, "T50djf"] <- apply(matrix(temp, ncol = simTime$no.useyr), 2, mean) + T50 <- soiltemp_nrsc[["dy"]][simTime$index.usedy, 2 + i_depth50] + #Moist and dry at 50cm depth for MCS and Lahn calcs + + #CSPartSummer: Is the soil saturated with water during some part of the summer June1 (=regular doy 244) - Aug31 (=regular doy 335) + isummer <- simTime2$doy_ForEachUsedDay_NSadj >= 244 & simTime2$doy_ForEachUsedDay_NSadj <= 335 + temp_annual[, "CSPartSummer"] <- vapply(wyears, function(yr) { + temp <- apply(swp_dy_nrsc[wateryears == yr & isummer, , drop = FALSE], 1, + function(x) all(x >= SWP_sat)) + rtemp <- rle(temp) + if(any(rtemp$values)) max(rtemp$lengths[rtemp$values]) else 0 + }, FUN.VALUE = NA_real_) + + am <- colMeans(temp_annual[, c("MAT50", "T50jja", "T50djf", "CSPartSummer")]) + + #---Soil temperature regime: based on Chambers et al. 
2014: Appendix 3 and on Soil Survey Staff 2010: p.28/Soil Survey Staff 2014: p.31 + #we ignore distinction between iso- and not iso- + if (am["MAT50"] >= 22) { + Tregime["Hyperthermic"] <- 1L + } else if (am["MAT50"] >= 15){ + Tregime["Thermic"] <- 1L + } else if (am["MAT50"] >= 8){ + Tregime["Mesic"] <- 1L + } else if (am["MAT50"] < 8 && am["MAT50"] > 0) { + if (am["CSPartSummer"] > 0) { + # ignoring organic soils, Saturated with water + if (am["T50jja"] > 0 && am["T50jja"] < 13) { + # ignoring O-horizon + Tregime["Cryic"] <- 1L + } else { + Tregime["Frigid"] <- 1L + } } else { - Sregime["Udic"] <- 1L - } + # ignoring O-horizon and histic epipedon, Not saturated with water + if (am["T50jja"] > 0 && am["T50jja"] < 15) { + Tregime["Cryic"] <- 1L + } else { + Tregime["Frigid"] <- 1L + } + } + } else if (am["MAT50"] <= 0 || permafrost) { + # limit should be 1 C for Gelisols + Tregime["Gelic"] <- 1L } - #Ustic soil moisture regime - if (!permafrost) { - if ((ConditionalDF3['COND4'] || !ConditionalDF3['COND5']) && - !ConditionalDF3['COND3'] && (ConditionalDF3['COND7'] || ConditionalDF3['COND8'])) { - Sregime["Ustic"] <- 1L + #Normal years for soil moisture regimes (Soil Survey Staff 2014: p.29) + #Should have a time period of 30 years to determine normal years + MAP <- c(mean(prcp.yr$ppt), sd(prcp.yr$ppt)) + normal1 <- (prcp.yr$ppt >= MAP[1] - MAP[2]) & (prcp.yr$ppt <= MAP[1] + MAP[2]) + MMP <- tapply(prcp.mo$ppt, + simTime2$month_ForEachUsedMonth_NSadj, + function(x) c(mean(x), sd(x))) + MMP <- matrix(unlist(MMP), nrow = 2, ncol = 12) + normal2 <- tapply(prcp.mo$ppt, simTime2$yearno_ForEachUsedMonth_NSadj, + function(x) sum((x >= MMP[1, ] - MMP[2, ]) & (x <= MMP[1, ] + MMP[2, ])) >= 8) + # Normal years = + # - Annual precipitation that is plus or minus one standard precipitation + # - and Mean monthly precipitation that is plus or minus one standard deviation of the long-term monthly precipitation for 8 of the 12 months + wyears_normal <- wyears[normal1 & normal2] + 
wyears_index <- findInterval(wyears_normal, wyears) + wdays_index <- wateryears %in% wyears_normal + days_per_wyear <- as.integer(table(wateryears[wdays_index], dnn = FALSE)) + + if (length(wyears_normal) > 2) { + #Structures used Lanh delinieation + #Days are moists in half of the Lanh soil depth (and not soil layers!) + n_Lanh <- length(i_Lanh) + width_Lanh <- diff(c(0, soildat[, "depth_cm"]))[i_Lanh] # stopifnot(sum(width_Lanh) == Lanh_depth[2] - Lanh_depth[1]) + temp <- swp_dy_nrsc[wdays_index, i_Lanh, drop = FALSE] > SWP_dry + temp <- temp * matrix(width_Lanh, nrow = sum(wdays_index), ncol = length(i_Lanh), byrow = TRUE) + Lanh_Dry_Half <- .rowSums(temp, m = sum(wdays_index), n = n_Lanh) <= sum(width_Lanh) / 2 + + #Conditions for Anhydrous soil delineation + LanhConditionalDF <- data.frame( + Years = rep(wyears_normal, days_per_wyear), + T50_at0C = T50[wdays_index] > 0, # days where T @ 50 is > 0 C + Lanh_Dry_Half = Lanh_Dry_Half, + MAT50 = rep(temp_annual[wyears_index, "MAT50"], days_per_wyear), + MATLanh = rep(temp_annual[wyears_index, "MATLanh"], days_per_wyear) + ) + #Mean Annual soil temperature is less than or equal to 0C + LanhConditionalDF$COND1 <- LanhConditionalDF$MAT50 <= 0 + #Soil temperature in the Lahn Depth is never greater than 5 + LanhConditionalDF$COND2 <- LanhConditionalDF$MATLanh <= 5 + #In the Lahn Depth, 1/2 of soil dry > 1/2 CUMULATIVE days when Mean Annual ST > 0C + LanhConditionalDF$COND3_Test <- LanhConditionalDF$Lanh_Dry_Half == LanhConditionalDF$T50_at0C #TRUE = where are both these conditions met + temp <- with(LanhConditionalDF, tapply(COND3_Test, Years, sum)) + LanhConditionalDF$HalfDryDaysCumAbove0C <- rep(temp, days_per_wyear) + temp <- with(LanhConditionalDF, tapply(T50_at0C, Years, sum)) + LanhConditionalDF$SoilAbove0C <- rep(temp, days_per_wyear) + LanhConditionalDF$COND3 <- LanhConditionalDF$HalfDryDaysCumAbove0C > .5 * LanhConditionalDF$SoilAbove0C #TRUE = Half of soil layers are dry greater than half the days where MAST 
>0c + LanhConditionalDF3 <- apply(aggregate(LanhConditionalDF[, c('COND1', 'COND2', 'COND3')], + by = list(LanhConditionalDF$Years), + function(x) sum(x) >= sum(!x)), + 2, function(x) sum(x) >= sum(!x)) + + #Structures used for MCS delineation + ConditionalDF <- data.frame( + Years = rep(wyears_normal, days_per_wyear), + DOY = simTime2$doy_ForEachUsedDay_NSadj[wdays_index], + MAT50 = rep(temp_annual[wyears_index, "MAT50"], days_per_wyear), + T50_at5C = T50[wdays_index] > 5, # days where T @ 50cm exceeds 5C + T50_at8C = T50[wdays_index] > 8, # days where T @ 50cm exceeds 8C + MCS_Moist_All = apply(swp_dy_nrsc[wdays_index, i_MCS, drop = FALSE] > SWP_dry, 1, all), + MCS_Dry_All = apply(swp_dy_nrsc[wdays_index, i_MCS, drop = FALSE] < SWP_dry, 1, all), + T50jja = rep(temp_annual[wyears_index, "T50jja"], days_per_wyear), + T50djf = rep(temp_annual[wyears_index, "T50djf"], days_per_wyear) + ) + + #COND1 - Dry in ALL parts for more than half of the CUMULATIVE days per year when the soil temperature at a depth of 50cm is above 5C + ConditionalDF$COND1_Test <- ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at5C #TRUE = where are both these conditions met + temp <- with(ConditionalDF, tapply(COND1_Test, Years, sum)) + ConditionalDF$DryDaysCumAbove5C <- rep(temp, days_per_wyear) + temp <- with(ConditionalDF, tapply(T50_at5C, Years, sum)) + ConditionalDF$SoilAbove5C <- rep(temp, days_per_wyear) + ConditionalDF$COND1 <- ConditionalDF$DryDaysCumAbove5C > .5 * ConditionalDF$SoilAbove5C #TRUE =Soils are dry greater than 1/2 cumulative days/year + + #COND1.1 - Moist in SOME or ALL parts for more than half of the CUMMULATIVE days per year when the soil temperature at a depth of 50cm is above 5 + #!MCS_Dry_All = MCS_ Moist Any; !MCS_Moist_All = MCS_Dry Any + #This Test is kind of redundant basically if COND1 is TRUE than + ConditionalDF$COND1_1_Test <- !ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at5C #TRUE = where are both these conditions met + temp <- with(ConditionalDF, 
tapply(COND1_1_Test, Years, sum)) + ConditionalDF$AnyMoistDaysCumAbove5C <- rep(temp, days_per_wyear) + ConditionalDF$COND1_1 <- ConditionalDF$AnyMoistDaysCumAbove5C > .5 * ConditionalDF$SoilAbove5C + #Cond2 - Moist in SOME or all parts for less than 90 CONSECUTIVE days when the the soil temperature at a depth of 50cm is above 8C + ConditionalDF$COND2_Test <- !ConditionalDF$MCS_Dry_All & ConditionalDF$T50_at8C #TRUE = where are both these conditions met + temp <- with(ConditionalDF, tapply(COND2_Test, Years, max.duration)) # Maximum consecutive days + ConditionalDF$COND2 <- rep(temp < 90, days_per_wyear) # TRUE = moist less than 90 consecutive days during >8 C soils, FALSE = moist more than 90 consecutive days + + #COND3 - MCS is Not dry in ANY part as long as 90 CUMULATIVE days - Can't be dry longer than 90 cum days + temp <- with(ConditionalDF, tapply(!MCS_Moist_All, Years, sum)) #Number of days where any soils are dry + ConditionalDF$DryDaysCumAny <- rep(temp, days_per_wyear) + ConditionalDF$COND3 <- ConditionalDF$DryDaysCumAny < 90 #TRUE = Not Dry for as long 90 cumlative days,FALSE = Dry as long as as 90 Cumlative days + + #COND4 - The means annual soil temperature at 50cm is < or > 22C + ConditionalDF$COND4 <- ConditionalDF$MAT50 > 22 #TRUE - Greater than 22, False - Less than 22 + + #COND5 - The absolute difference between the temperature in winter @ 50cm and the temperature in summer @ 50cm is > or < 6 + ConditionalDF$COND5 <- abs(ConditionalDF$T50djf - ConditionalDF$T50jja) > 6 #TRUE - Greater than 6, FALSE - Less than 6 + + #COND6 - Dry in ALL parts LESS than 45 CONSECUTIVE days in the 4 months following the summer solstice + temp <- with(ConditionalDF[ConditionalDF$DOY %in% c(172:293),], tapply(MCS_Dry_All, Years, max.duration)) #Consecutive days of dry soil after summer solsitice + ConditionalDF$DryDaysConsecSummer <- rep(temp, days_per_wyear) + ConditionalDF$COND6 <- ConditionalDF$DryDaysConsecSummer < 45 # TRUE = dry less than 45 consecutive days + + 
#COND7 - MCS is MOIST in SOME parts for more than 180 CUMULATIVE days + temp <- with(ConditionalDF, tapply(!MCS_Dry_All, Years, function(x) sum(x)))#Number of days where any soils are moist + ConditionalDF$MoistDaysCumAny <- rep(temp, days_per_wyear) + ConditionalDF$COND7 <- ConditionalDF$MoistDaysCumAny > 180 #TRUE = Not Dry or Moist for as long 180 cumlative days + + #Cond8 - MCS is MOIST in SOME parts for more than 90 CONSECUTIVE days + temp <- with(ConditionalDF, tapply(!MCS_Dry_All,Years, max.duration)) #Consecutive days of Moist soil + ConditionalDF$MoistDaysConsecAny <- rep(temp, days_per_wyear) + ConditionalDF$COND8 <- ConditionalDF$MoistDaysConsecAny > 90 # TRUE = Moist more than 90 Consecutive Days + + #COND9 - Moist in ALL parts MORE than 45 CONSECUTIVE days in the 4 months following the winter solstice + temp <- with(ConditionalDF[ConditionalDF$DOY %in% c(355:365, 1:111), ], tapply(MCS_Moist_All, Years, max.duration))#Consecutive days of moist soil after winter solsitice + ConditionalDF$MoistDaysConsecWinter <- rep(temp, days_per_wyear) + ConditionalDF$COND9 <- ConditionalDF$MoistDaysConsecWinter > 45 # TRUE = moist more than 45 consecutive days + + ConditionalDF3 <- apply(aggregate(ConditionalDF[, c('COND1','COND1_1','COND2','COND3','COND4','COND5','COND6','COND7','COND8','COND9')], + by=list(Year=ConditionalDF$Years), + function(x) sum(x) > sum(!x)), + 2, function(x) sum(x) > sum(!x)) + + #---Soil moisture regime: based on Chambers et al. 
2014: Appendix 3 and on Soil Survey Staff 2010: p.26-28/Soil Survey Staff 2014: p.28-31 + #we ignore 'Aquic' + + #Anhydrous condition: Soil Survey Staff 2010: p.16/Soil Survey Staff 2014: p.18 + #we ignore test for 'ice-cemented permafrost' and 'rupture-resistance class' + if (LanhConditionalDF3['COND1'] && LanhConditionalDF3['COND2'] && LanhConditionalDF3['COND3']) + Sregime["Anhydrous"] <- 1L + + #Aridic soil moisture regime; The limits set for soil temperature exclude from these soil moisture regimes soils in the very cold and dry polar regions and in areas at high elevations. Such soils are considered to have anhydrous condition + if (ConditionalDF3['COND1'] && ConditionalDF3['COND2'] && !ConditionalDF3['COND3']) + Sregime["Aridic"] <- 1L + + #Udic soil moisture regime - #we ignore test for 'three- phase system' during T50 > 5 + if (ConditionalDF3['COND3']) { + if (!ConditionalDF3['COND4'] && ConditionalDF3['COND5']) { + if (ConditionalDF3['COND6']) + Sregime["Udic"] <- 1L + } else { + Sregime["Udic"] <- 1L + } } - if (!ConditionalDF3['COND4'] && ConditionalDF3['COND5'] && - !ConditionalDF3['COND3'] && !ConditionalDF3['COND1']) { - if (ConditionalDF3['COND9']) { - if (ConditionalDF3['COND6']) - Sregime["Ustic"] <- 1L - } else { - Sregime["Ustic"] <- 1L - } + + #Ustic soil moisture regime + if (!permafrost) { + if ((ConditionalDF3['COND4'] || !ConditionalDF3['COND5']) && + !ConditionalDF3['COND3'] && (ConditionalDF3['COND7'] || ConditionalDF3['COND8'])) { + Sregime["Ustic"] <- 1L + } + if (!ConditionalDF3['COND4'] && ConditionalDF3['COND5'] && + !ConditionalDF3['COND3'] && !ConditionalDF3['COND1']) { + if (ConditionalDF3['COND9']) { + if (ConditionalDF3['COND6']) + Sregime["Ustic"] <- 1L + } else { + Sregime["Ustic"] <- 1L + } + } } - } - #Xeric soil moisture regime - if (!ConditionalDF3['COND6'] && ConditionalDF3['COND9'] && - !ConditionalDF3['COND4'] && ConditionalDF3['COND5'] && - (ConditionalDF3['COND1_1'] || !ConditionalDF3['COND2'])) { - Sregime["Xeric"] 
<- 1L - } + #Xeric soil moisture regime + if (!ConditionalDF3['COND6'] && ConditionalDF3['COND9'] && + !ConditionalDF3['COND4'] && ConditionalDF3['COND5'] && + (ConditionalDF3['COND1_1'] || !ConditionalDF3['COND2'])) { + Sregime["Xeric"] <- 1L + } temp_annual[wyears_index, 6:8] <- as.matrix(aggregate(LanhConditionalDF[, c('T50_at0C', 'Lanh_Dry_Half', 'HalfDryDaysCumAbove0C')], @@ -4049,17 +4166,40 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer "COND8", "MoistDaysConsecWinter", "COND9")], by = list(ConditionalDF$Years), mean)[, -1]) - regimes_done <- TRUE + regimes_done <- TRUE + + to_del <- c("n_Lanh", "width_Lanh", "Lanh_Dry_Half", "LanhConditionalDF", + "LanhConditionalDF3", "ConditionalDF", "ConditionalDF3") + #to_del <- to_del[to_del %in% ls()] + if (length(to_del) > 0) + try(rm(list = to_del), silent = TRUE) + + } else { + if (!be.quiet) + print(paste(i_label, "Number of normal years not long enough to calculate NRCS soil moisture regimes. Try increasing length of simulation")) + + Sregime[] <- NA + } + + to_del <- c("calc50", "calcLanh", "calcMCS", "clay_temp", "days_per_wyear", + "i_depth50", "i_Lanh", "i_MCS", "imp_depth", "impermeability", + "isummer", "Lanh_depth", "MAP", "MMP", "normal1", "normal2", + "sand_temp", "soildat", "soiltemp_nrsc", "SWP_dry", "swp_dy_nrsc", + "SWP_sat", "vwc_dy_nrsc", "wateryears", "wdays_index", + "wyears", "wyears_index", "wyears_normal") + #to_del <- to_del[to_del %in% ls()] + if (length(to_del) > 0) + try(rm(list = to_del), silent = TRUE) } else { if (!be.quiet) - print(paste0(i_label, "Number of normal years not long enough to calculate NRCS Soil Moisture Regimes. 
Try increasing length of simulation")) - Tregime[] <- Sregime[] <- NA + print(paste(i_label, "has unrealistic soil temperature values: NRCS soil moisture/temperature regimes not calculated.")) + Tregime[] <- Sregime[] <- NA } } else { if (!be.quiet) - print(paste0(i_label, "soil temperature module turned off but required for NRCS Soil Moisture/Temperature Regimes.")) + print(paste(i_label, "soil temperature module turned off but required for NRCS Soil Moisture/Temperature Regimes.")) Tregime[] <- Sregime[] <- NA } @@ -4076,22 +4216,14 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer resAgg[nv:(nv_new - 1), is_central] <- Sregime nv <- nv_new - to_del <- c("calc50", "calcLanh", "calcMCS", "clay_temp", "ConditionalDF", - "ConditionalDF3", "temp_annual", "am", "days_per_wyear", "Fifty_depth", - "i_depth50", "i_Lanh", "i_MCS", "imp_depth", "impermeability", - "isummer", "Lanh_depth", "Lanh_Dry_Half", "LanhConditionalDF", - "LanhConditionalDF3", "MAP", "MCS_depth", "MMP", "n_Lanh", "normal1", - "normal2", "permafrost", "sand_temp", "soildat", "soiltemp_nrsc", - "SWP_dry", "swp_dy_nrsc", "SWP_sat", "T50", "vwc_dy_nrsc", "wateryears", - "wdays_index", "weights50", "weightsLanh", "weightsMCS", "width_Lanh", - "wyears", "wyears_index", "wyears_normal") - # to_del <- to_del[to_del %in% ls()] + to_del <- c("MCS_depth", "Fifty_depth", "permafrost", "temp_annual") + #to_del <- to_del[to_del %in% ls()] if (length(to_del) > 0) try(rm(list = to_del), silent = TRUE) } #35b - if(any(simulation_timescales=="daily") && aon$dailyNRCS_Chambers2014_ResilienceResistance && aon$dailyNRCS_SoilMoistureTemperatureRegimes){ #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + if(aon$dailyNRCS_Chambers2014_ResilienceResistance){ #Based on Table 1 in Chambers, J. C., D. A. Pyke, J. D. Maestas, M. Pellant, C. S. Boyd, S. B. Campbell, S. Espinosa, D. W. Havlina, K. E. Mayer, and A. Wuenschel. 2014. 
Using Resistance and Resilience Concepts to Reduce Impacts of Invasive Annual Grasses and Altered Fire Regimes on the Sagebrush Ecosystem and Greater Sage-Grouse: A Strategic Multi-Scale Approach. Gen. Tech. Rep. RMRS-GTR-326. U.S. Department of Agriculture, Forest Service, Rocky Mountain Research Station, Fort Collins, CO. if(print.debug) print("Aggregation of dailyNRCS_Chambers2014_ResilienceResistance") if(!exists("prcp.yr")) prcp.yr <- get_PPT_yr(sc, runData, simTime) @@ -4101,7 +4233,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer resilience <- resistance <- rep(0, times=length(cats)) names(resilience) <- names(resistance) <- cats - if(regimes_done){ + if (regimes_done && aon$dailyNRCS_SoilMoistureTemperatureRegimes) { #---Table 1 in Chambers et al. 2014 rows_resilience <- c("ModeratelyHigh", "ModeratelyHigh", "Moderate", "Low", "Low") rows_resistance <- c("High", "Moderate", "ModeratelyLow", "Moderate", "Low") @@ -4130,13 +4262,48 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer resAgg[nv:(nv+2*length(cats)-1), is_central] <- c(resilience, resistance) nv <- nv + 2*length(cats) - rm(cats, resilience, resistance, Tregime, Sregime) + rm(cats, resilience, resistance) } - rm(regimes_done) + + #35c + if(aon$dailyNRCS_Maestas2016_ResilienceResistance){ #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + #Based on Maestas, J.D., Campbell, S.B., Chambers, J.C., Pellant, M. & Miller, R.F. (2016). Tapping Soil Survey Information for Rapid Assessment of Sagebrush Ecosystem Resilience and Resistance. Rangelands, 38, 120-128. + if (print.debug) + print("Aggregation of dailyNRCS_Maestas2016_ResilienceResistance") + + RR <- c(Low = 0, Moderate = 0, High = 0) + + if (regimes_done && aon$dailyNRCS_SoilMoistureTemperatureRegimes) { + #---Table 1 in Maestas et al. 
2016 + Table1 <- matrix(c( + "Cryic", "Xeric", "High", + "Frigid", "Xeric", "High", + "Cryic", "Aridic", "Moderate", + "Frigid", "Aridic", "Moderate", + "Mesic", "Xeric", "Moderate", + "Mesic", "Aridic", "Low"), + ncol = 3, byrow = TRUE) + + temp <- Table1[as.logical(Tregime[Table1[, 1]]) & as.logical(Sregime[Table1[, 2]]), 3] + RR[temp] <- 1 + + rm(Table1) + } + + nv_new <- nv + length(RR) + resAgg[nv:(nv_new - 1), is_central] <- RR + nv <- nv_new + + rm(RR) + } + + rm(regimes_done) + if (aon$dailyNRCS_SoilMoistureTemperatureRegimes) + rm(Tregime, Sregime) #35.2 - if(any(simulation_timescales=="daily") & aon$dailyWetDegreeDays){ #Wet degree days on daily temp and swp + if(aon$dailyWetDegreeDays){ #Wet degree days on daily temp and swp if(print.debug) print("Aggregation of dailyWetDegreeDays") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) @@ -4183,7 +4350,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #35.3 - if(any(simulation_timescales=="daily") & aon$dailyThermalDrynessStartEnd){ + if(aon$dailyThermalDrynessStartEnd){ if (print.debug) print("Aggregation of dailyThermalDrynessStartEnd") if (!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4217,7 +4384,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #35.4 - if(any(simulation_timescales=="daily") & aon$dailyThermalSWPConditionCount){ + if(aon$dailyThermalSWPConditionCount){ if(print.debug) print("Aggregation of 
dailyThermalSWPConditionCount") if(!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) @@ -4261,7 +4428,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #TODO(drs): progress state #36 - if(any(simulation_timescales=="monthly") & aon$monthlySWPdryness){#dry periods based on monthly swp data: accountNSHemispheres_agg + if(aon$monthlySWPdryness){#dry periods based on monthly swp data: accountNSHemispheres_agg if(print.debug) print("Aggregation of monthlySWPdryness") if(!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(sc, sw_vwcmatric, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) @@ -4299,7 +4466,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(drymonths.top, drymonths.bottom, years.top, start.top, years.bottom, start.bottom, adjMonths) } #37 - if(any(simulation_timescales=="daily") & aon$dailySWPdrynessANDwetness){#Dry and wet periods based on daily swp: accountNSHemispheres_agg + if(aon$dailySWPdrynessANDwetness){#Dry and wet periods based on daily swp: accountNSHemispheres_agg if(print.debug) print("Aggregation of dailySWPdrynessANDwetness") if(!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) #swp.dy.all is 
required to get all layers @@ -4361,7 +4528,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(res.dry, wet, wet_crit, AtLeastOneWet, AllWet, AllDry) } #38 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsDuration){ + if(aon$dailySuitablePeriodsDuration){ if(print.debug) print("Aggregation of dailySuitablePeriodsDuration") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) @@ -4392,7 +4559,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(wet.top, wet.bottom, durations.top, snowfree, niceTemp) } #39 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsAvailableWater){ + if(aon$dailySuitablePeriodsAvailableWater){ if(print.debug) print("Aggregation of dailySuitablePeriodsAvailableWater") if(!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(sc, sw_swcbulk, tscale = "dy", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("temp.dy")) temp.dy <- get_Temp_dy(sc, runData, simTime) @@ -4421,7 +4588,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(swa.top, swa.bottom, suitable) } #40 - if(any(simulation_timescales=="daily") & aon$dailySuitablePeriodsDrySpells){ + if(aon$dailySuitablePeriodsDrySpells){ if(print.debug) print("Aggregation of dailySuitablePeriodsDrySpells") if(!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, 
texture, sand, clay) #swp.dy.all is required to get all layers @@ -4461,7 +4628,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(dry.top, dry.bottom, suitable, dry_crit, adjDays, durationDryPeriods.min) } #41 - if(any(simulation_timescales=="daily") & aon$dailySWPdrynessDurationDistribution){#cummulative frequency distribution of durations of dry soils in each of the four seasons and for each of the SWP.crit + if(aon$dailySWPdrynessDurationDistribution){#cumulative frequency distribution of durations of dry soils in each of the four seasons and for each of the SWP.crit if(print.debug) print("Aggregation of dailySWPdrynessDurationDistribution") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) @@ -4493,7 +4660,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer if(length(bottomL) > 0 && !identical(bottomL, 0)) rm(wet.bottom) } #42 - if(any(simulation_timescales=="daily") && aon$dailySWPdrynessEventSizeDistribution){ + if(aon$dailySWPdrynessEventSizeDistribution){ if(print.debug) print("Aggregation of dailySWPdrynessEventSizeDistribution") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) @@ -4542,7 +4709,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer if(length(bottomL) > 0 && !identical(bottomL, 0)) rm(dry.bottom, events.bottom) } #43 - if(any(simulation_timescales=="daily") && aon$dailySWPdrynessIntensity) { + 
if(aon$dailySWPdrynessIntensity) { if(print.debug) print("Aggregation of dailySWPdrynessIntensity") if(!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dy", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4577,7 +4744,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #43.2 - if(any(simulation_timescales=="daily") & aon$dailyThermalDrynessStress){ + if(aon$dailyThermalDrynessStress){ if(print.debug) print("Aggregation of dailyThermalDrynessStress") if(!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) #swp.dy.all is required to get all layers @@ -4623,7 +4790,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Mean monthly values #44 - if(any(simulation_timescales=="monthly") & aon$monthlyTemp){ + if(aon$monthlyTemp){ if(print.debug) print("Aggregation of monthlyTemp") if(!exists("temp.mo")) temp.mo <- get_Temp_mo(sc, runData, simTime) @@ -4634,7 +4801,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #45 - if(any(simulation_timescales=="monthly") & aon$monthlyPPT){ + if(aon$monthlyPPT){ if(print.debug) print("Aggregation of monthlyPPT") if(!exists("prcp.mo")) prcp.mo <- get_PPT_mo(sc, runData, simTime) @@ -4645,7 +4812,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #46 - if(any(simulation_timescales=="monthly") & aon$monthlySnowpack){ + if(aon$monthlySnowpack){ if(print.debug) print("Aggregation of monthlySnowpack") if(!exists("SWE.mo")) SWE.mo <- 
get_SWE_mo(sc, runData, simTime) @@ -4656,7 +4823,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #47 - if(any(simulation_timescales == "monthly") & aon$monthlySoilTemp) { + if(aon$monthlySoilTemp) { if(print.debug) print("Aggregation of monthlySoilTemp") if(!exists("soiltemp.mo")) soiltemp.mo <- get_Response_aggL(sc, sw_soiltemp, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4668,7 +4835,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #48 - if(any(simulation_timescales=="monthly") & aon$monthlyRunoff){ + if(aon$monthlyRunoff){ if(print.debug) print("Aggregation of monthlyRunoff") if(!exists("runoff.mo")) runoff.mo <- get_Runoff_mo(sc, runData, simTime) @@ -4679,7 +4846,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #49 - if(any(simulation_timescales=="monthly") & aon$monthlyHydraulicRedistribution){ + if(aon$monthlyHydraulicRedistribution){ if(print.debug) print("Aggregation of monthlyHydraulicRedistribution") if(!exists("hydred.mo")) hydred.mo <- get_Response_aggL(sc, sw_hd, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4691,7 +4858,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #50 - if(any(simulation_timescales=="monthly") & aon$monthlyInfiltration){ + if(aon$monthlyInfiltration){ if(print.debug) print("Aggregation of monthlyInfiltration") if(!exists("inf.mo")) inf.mo <- get_Inf_mo(sc, runData, simTime) @@ -4702,7 +4869,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #51 - if(any(simulation_timescales=="monthly") & aon$monthlyDeepDrainage){ + if(aon$monthlyDeepDrainage){ if(print.debug) print("Aggregation of 
monthlyDeepDrainage") if(!exists("deepDrain.mo")) deepDrain.mo <- get_DeepDrain_mo(sc, runData, simTime) @@ -4713,7 +4880,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #52 - if(any(simulation_timescales=="monthly") & aon$monthlySWPmatric){ + if(aon$monthlySWPmatric){ if(print.debug) print("Aggregation of monthlySWPmatric") if(!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(sc, sw_vwcmatric, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) @@ -4724,7 +4891,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv+24 } #53 a.) - if(any(simulation_timescales=="monthly") & aon$monthlyVWCbulk){ + if(aon$monthlyVWCbulk){ if(print.debug) print("Aggregation of monthlyVWC") if(!exists("vwcbulk.mo")) vwcbulk.mo <- get_Response_aggL(sc, sw_vwcbulk, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4736,7 +4903,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #53 b.) 
- if(any(simulation_timescales=="monthly") & aon$monthlyVWCmatric){ + if(aon$monthlyVWCmatric){ if(print.debug) print("Aggregation of monthlyVWCmatric") if(!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(sc, sw_vwcmatric, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4748,7 +4915,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #54 - if(any(simulation_timescales=="monthly") & aon$monthlySWCbulk){ + if(aon$monthlySWCbulk){ if(print.debug) print("Aggregation of monthlySWCbulk") if(!exists("swcbulk.mo")) swcbulk.mo <- get_Response_aggL(sc, sw_swcbulk, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4760,7 +4927,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #55 - if(any(simulation_timescales=="monthly") & aon$monthlySWAbulk){ + if(aon$monthlySWAbulk){ if(print.debug) print("Aggregation of monthlySWA") if(!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(sc, sw_vwcmatric, tscale = "mo", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4795,7 +4962,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer rm(VWCcritsT, VWCcritsB, temp_top_mo, temp_top_mean, temp_top_sd, temp_bottom_mo, temp_bottom_mean, temp_bottom_sd) } #56 - if(any(simulation_timescales=="monthly") & aon$monthlyTranspiration){ + if(aon$monthlyTranspiration){ if(print.debug) print("Aggregation of monthlyTranspiration") if(!exists("transp.mo")) transp.mo <- get_Response_aggL(sc, sw_transp, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4807,7 +4974,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, 
i_sw_input_soillayer nv <- nv_new } #57 - if(any(simulation_timescales=="monthly") & aon$monthlySoilEvaporation){ + if(aon$monthlySoilEvaporation){ if(print.debug) print("Aggregation of monthlySoilEvaporation") if(!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(sc, sw_evsoil, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4819,7 +4986,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #58 - if(any(simulation_timescales=="monthly") & aon$monthlyAET){ + if(aon$monthlyAET){ if(print.debug) print("Aggregation of monthlyAET") if(!exists("AET.mo")) AET.mo <- get_AET_mo(sc, runData, simTime) @@ -4830,7 +4997,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #59 - if(any(simulation_timescales=="monthly") & aon$monthlyPET){ + if(aon$monthlyPET){ if(print.debug) print("Aggregation of monthlyPET") if(!exists("PET.mo")) PET.mo <- get_PET_mo(sc, runData, simTime) @@ -4841,7 +5008,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #59.2 - if (any(simulation_timescales == "monthly") && aon$monthlyVPD) { + if (aon$monthlyVPD) { if (print.debug) print("Aggregation of monthlyVPD") if (!exists("temp.mo")) temp.mo <- get_Temp_mo(sc, runData, simTime) if (!exists("vpd.mo")) vpd.mo <- get_VPD_mo(sc, temp.mo, xin = swRunScenariosData, st2 = simTime2) @@ -4853,7 +5020,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv + 12 } #60 - if(any(simulation_timescales=="monthly") & aon$monthlyAETratios){ + if(aon$monthlyAETratios){ if(print.debug) print("Aggregation of monthlyAETratios") if(!exists("AET.mo")) AET.mo <- get_AET_mo(sc, runData, simTime) if(!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(sc, sw_evsoil, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) 
@@ -4873,7 +5040,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer nv <- nv_new } #61 - if(any(simulation_timescales=="monthly") & aon$monthlyPETratios){ + if(aon$monthlyPETratios){ if(print.debug) print("Aggregation of monthlyPETratios") if(!exists("PET.mo")) PET.mo <- get_PET_mo(sc, runData, simTime) if(!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(sc, sw_evsoil, tscale = "mo", scaler = 10, FUN = sum, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) @@ -4897,7 +5064,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---Aggregation: Potential regeneration #regeneration: accountNSHemispheres_agg #62 - if(any(simulation_timescales=="daily") & aon$dailyRegeneration_bySWPSnow) { + if(aon$dailyRegeneration_bySWPSnow) { if(print.debug) print("Aggregation of dailyRegeneration_bySWPSnow") if(!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(sc, sw_vwcmatric, tscale = "dyAll", scaler = 1, FUN = weighted.mean, weights = layers_width, x = runData, st = simTime, st2 = simTime2, topL = topL, bottomL = bottomL) if(!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) @@ -4915,7 +5082,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #Artemisia tridentata regeneration according to factor model (2012-02-15, drs), call for every regeneration species #accountNSHemispheres_agg: param$Doy_SeedDispersalStart0 must be set correctly\ #63 - if(any(simulation_timescales=="daily") & aon$dailyRegeneration_GISSM & no.species_regeneration > 0){ + if(aon$dailyRegeneration_GISSM & no.species_regeneration > 0){ # Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). Modeling regeneration responses of big sagebrush (Artemisia tridentata) to abiotic conditions. Ecol Model, 286, 66-77. 
if(print.debug) print("Aggregation of dailyRegeneration_GISSM") #---Access daily data, which do not depend on specific species parameters, i.e., start of season @@ -4936,38 +5103,52 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer for (sp in seq_len(no.species_regeneration)) { param <- data.frame(t(param.species_regeneration[,sp])) - #Regeneration year=RY: RYdoy=1 == start of seed dispersal = start of 'regeneration year' - Doy_SeedDispersalStart <- max(round(param$Doy_SeedDispersalStart0 + param$SeedDispersalStart_DependencyOnMeanTempJanuary * TmeanJan, 0) %% 365, 1) - moveByDays <- ifelse(Doy_SeedDispersalStart == 1, 1, max(as.numeric(ISOdate(simTime$useyrs[1] - 1, 12, 31, tz = "UTC") - ISOdate(simTime$useyrs[1] - 1, 1, 1, tz = "UTC")) + 1 - (Doy_SeedDispersalStart - 1) %% 365, 1)) + #Regeneration year=RY: RYdoy=1 == start of seed dispersal = start of 'regeneration year' + temp <- param$Doy_SeedDispersalStart0 + + param$SeedDispersalStart_DependencyOnMeanTempJanuary * TmeanJan + Doy_SeedDispersalStart <- as.integer(max(round(temp, 0) %% 365, 1)) + + moveByDays <- if (Doy_SeedDispersalStart > 1) { + temp <- ISOdate(simTime$useyrs[1] - 1, 12, 31, tz = "UTC") - + ISOdate(simTime$useyrs[1] - 1, 1, 1, tz = "UTC") + 1 - + (Doy_SeedDispersalStart - 1) + as.integer(max(c(as.numeric(temp) %% 365, 1))) + } else { + 1L + } + #Calculate regeneration year dates + et <- simTime$no.usedy + itail <- (et - moveByDays + 1):et if (startyr > simstartyr) { #start earlier to complete RY - RY.index.usedy <- c(((st <- simTime$index.usedy[1])-moveByDays):(st-1), simTime$index.usedy[-(((et <- length(simTime$index.usedy))-moveByDays+1):et)]) #index indicating which rows of the daily SoilWat output is used + st <- simTime$index.usedy[1] + RY.index.usedy <- c((st - moveByDays):(st - 1), simTime$index.usedy[-itail]) #index indicating which rows of the daily SoilWat output is used RYyear_ForEachUsedDay <- simTime2$year_ForEachUsedDay #'regeneration year' for 
each used day RYdoy_ForEachUsedDay <- simTime2$doy_ForEachUsedDay #'doy of the regeneration year' for each used day } else if (!(startyr > simstartyr)) { #start later to get a complete RY - RY.index.usedy <- simTime$index.usedy[-c(1:(Doy_SeedDispersalStart - 1), (((et <- length(simTime$index.usedy))-moveByDays+1):et))] - RYyear_ForEachUsedDay <- simTime2$year_ForEachUsedDay[-which(simTime2$year_ForEachUsedDay == simTime2$year_ForEachUsedDay[1])] - RYdoy_ForEachUsedDay <- simTime2$doy_ForEachUsedDay[-which(simTime2$year_ForEachUsedDay == simTime2$year_ForEachUsedDay[1])] + RY.index.usedy <- simTime$index.usedy[-c(1:(Doy_SeedDispersalStart - 1), itail)] + temp <- which(simTime2$year_ForEachUsedDay == simTime2$year_ForEachUsedDay[1]) + RYyear_ForEachUsedDay <- simTime2$year_ForEachUsedDay[-temp] + RYdoy_ForEachUsedDay <- simTime2$doy_ForEachUsedDay[-temp] } + RY.useyrs <- unique(RYyear_ForEachUsedDay) #list of 'regeneration years' that are used for aggregation + # normal year for each used 'doy of the regeneration year' - et <- length(RYyear_ForEachUsedDay) - year_ForEachUsedRYDay <- c(rep(simTime$useyrs[1] - 1, times = moveByDays), - RYyear_ForEachUsedDay[-((et - moveByDays + 1):et)]) + RY_N_usedy <- length(RY.index.usedy) + itail <- (RY_N_usedy - moveByDays + 1):RY_N_usedy + year_ForEachUsedRYDay <- c(rep(simTime$useyrs[1] - 1, moveByDays), + RYyear_ForEachUsedDay[-itail]) # normal doy for each used 'doy of the regeneration year' st <- simTime$index.usedy[1] - et <- length(RYdoy_ForEachUsedDay) doy_ForEachUsedRYDay <- c((st - moveByDays):(st - 1), - RYdoy_ForEachUsedDay[-((et - moveByDays + 1):et)]) - RY.useyrs <- unique(RYyear_ForEachUsedDay) #list of 'regeneration years' that are used for aggregation + RYdoy_ForEachUsedDay[-itail]) #Access daily data, the first time and afterwards only if Doy_SeedDispersalStart is different from value of previous species if (sp == 1 || Doy_SeedDispersalStart != prev.Doy_SeedDispersalStart) { - swp <- 
swpmatric.dy.all$val[RY.index.usedy, 2 + ld] - if (length(ld) == 1) - swp <- matrix(swp, ncol=1) + swp <- swpmatric.dy.all$val[RY.index.usedy, 2 + ld, drop = FALSE] snow <- temp.snow[RY.index.usedy, 3]*10 #mm swe in snowpack airTminSnow <- ifelse(snow > 0, param$Temp_ExperiencedUnderneathSnowcover, temp.temp[RY.index.usedy, 4]) airTmax <- temp.temp[RY.index.usedy, 3] @@ -5019,32 +5200,22 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer swp.TopMean = swp.TopMean, TmeanJan = TmeanJan, param = param)) - Germination_RestrictedByTimeToGerminate <- rep(FALSE, times = length(Germination_TimeToGerminate)) + Germination_RestrictedByTimeToGerminate <- rep(FALSE, RY_N_usedy) Germination_RestrictedByTimeToGerminate[Germination_DuringFavorableConditions & is.na(Germination_TimeToGerminate)] <- TRUE #---3. Successful germinations GerminationSuccess_Initiated <- !is.na(Germination_TimeToGerminate) - temp <- padded <- rep(FALSE, times=length(GerminationSuccess_Initiated)) - germ.starts <- seq_along(temp)[GerminationSuccess_Initiated] - germ.durs <- Germination_TimeToGerminate[GerminationSuccess_Initiated] - 1 + germ.starts <- which(GerminationSuccess_Initiated) + germ.durs <- Germination_TimeToGerminate[germ.starts] - 1 if (param$GerminationPeriods_0ResetOr1Resume == 1) { - temp.wait <- na.exclude(unlist(lapply(seq_along(temp), function(t) { - if (!is.na(Germination_TimeToGerminate[t])) { - t1 <- LengthDays_FavorableConditions[t:length(LengthDays_FavorableConditions)] - t2 <- na.exclude(t1) - t3 <- which(t2[Germination_TimeToGerminate[t]] == t1)[1] - sum(is.na(t1[1:t3])) - } else { - NA - } - }))) - germ.durs <- germ.durs + temp.wait + germ.durs <- germ.durs + germination_wait_times(Germination_TimeToGerminate, + LengthDays_FavorableConditions) } emergence.doys <- germ.starts + germ.durs #index of start of successful germinations + time to germinate (including wait time during unfavorable conditions if 'resume') - temp[emergence.doys] <- TRUE - 
padded[!GerminationSuccess_Initiated] <- NA - Germination_Emergence.doys <- napredict(na.action(na.exclude(padded)), emergence.doys) - Germination_Emergence <- temp + Germination_Emergence <- rep(FALSE, RY_N_usedy) + Germination_Emergence[emergence.doys] <- TRUE + Germination_Emergence.doys <- rep(NA, RY_N_usedy) + Germination_Emergence.doys[GerminationSuccess_Initiated] <- emergence.doys #----SEEDLING SURVIVAL @@ -5065,20 +5236,22 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #---2. Grow and kill the seedlings SeedlingSurvival_1stSeason <- Seedling_Starts <- Germination_Emergence #TRUE=seedling that germinated on that day and survives until end of season; FALSE=no germination or seedling dies during the first season - SeedlingMortality_CausesByYear <- matrix(data=0, nrow=length(RY.useyrs), ncol=9) - colnames(SeedlingMortality_CausesByYear) <- paste("Seedlings1stSeason.Mortality.", c("UnderneathSnowCover", "ByTmin", "ByTmax", "ByChronicSWPMax", "ByChronicSWPMin", "ByAcuteSWPMin", - "DuringStoppedGrowth.DueSnowCover", "DuringStoppedGrowth.DueTmin", "DuringStoppedGrowth.DueTmax"), sep="") + SeedlingSurvival_1stSeason[] <- SeedlingSurvival_1stSeason # deep copy because Rcpp-version of get_KilledBySoilLayers changes in place which has otherwise side effects on Seedling_Starts and Germination_Emergence + SeedlingMortality_CausesByYear <- matrix(0, nrow = length(RY.useyrs), ncol = 9) + colnames(SeedlingMortality_CausesByYear) <- paste0("Seedlings1stSeason.Mortality.", c("UnderneathSnowCover", "ByTmin", "ByTmax", "ByChronicSWPMax", "ByChronicSWPMin", "ByAcuteSWPMin", + "DuringStoppedGrowth.DueSnowCover", "DuringStoppedGrowth.DueTmin", "DuringStoppedGrowth.DueTmax")) for (y in seq_along(RY.useyrs)) {#for each year - RYDoys_SeedlingStarts_ThisYear <- which(Seedling_Starts[index.thisYear <- RYyear_ForEachUsedDay == RY.useyrs[y]]) + index.thisYear <- RYyear_ForEachUsedDay == RY.useyrs[y] + RYDoys_SeedlingStarts_ThisYear <- 
which(Seedling_Starts[index.thisYear]) if (length(RYDoys_SeedlingStarts_ThisYear) > 0) {#if there are any germinations #init values for this year no.days <- sum(index.thisYear) thisYear_SeedlingMortality_UnderneathSnowCover <- SeedlingMortality_UnderneathSnowCover[index.thisYear] thisYear_SeedlingMortality_ByTmin <- SeedlingMortality_ByTmin[index.thisYear] thisYear_SeedlingMortality_ByTmax <- SeedlingMortality_ByTmax[index.thisYear] - thisYear_SeedlingMortality_ByChronicSWPMax <- SeedlingMortality_ByChronicSWPMax[index.thisYear, ] - thisYear_SeedlingMortality_ByChronicSWPMin <- SeedlingMortality_ByChronicSWPMin[index.thisYear, ] - thisYear_SeedlingMortality_ByAcuteSWPMin <- SeedlingMortality_ByAcuteSWPMin[index.thisYear, ] + thisYear_SeedlingMortality_ByChronicSWPMax <- SeedlingMortality_ByChronicSWPMax[index.thisYear, , drop = FALSE] + thisYear_SeedlingMortality_ByChronicSWPMin <- SeedlingMortality_ByChronicSWPMin[index.thisYear, , drop = FALSE] + thisYear_SeedlingMortality_ByAcuteSWPMin <- SeedlingMortality_ByAcuteSWPMin[index.thisYear, , drop = FALSE] thisYear_SeedlingGrowth_AbsenceOfSnowCover <- SeedlingGrowth_AbsenceOfSnowCover[index.thisYear] thisYear_SeedlingGrowth_AtAboveTmin <- SeedlingGrowth_AtAboveTmin[index.thisYear] thisYear_SeedlingGrowth_AtBelowTmax <- SeedlingGrowth_AtBelowTmax[index.thisYear] @@ -5101,32 +5274,33 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #Snow cover thisSeedlingGrowth_AbsenceOfSnowCover <- calculate_SuitableGrowthThisYear_UnderCondition(favorable.conditions=thisSeedlingGrowing & thisYear_SeedlingGrowth_AbsenceOfSnowCover, consequences.unfavorable=param$SeedlingGrowth_0StopOr1Resume) temp <- !thisSeedlingGrowth_AbsenceOfSnowCover[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueSnowCover"] <- sg_RYdoy + which(temp)[1] #Minimum temperature thisSeedlingGrowth_AtAboveTmin <- 
calculate_SuitableGrowthThisYear_UnderCondition(favorable.conditions=thisSeedlingGrowing & thisYear_SeedlingGrowth_AtAboveTmin, consequences.unfavorable=param$SeedlingGrowth_0StopOr1Resume) temp <- !thisSeedlingGrowth_AtAboveTmin[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueTmin"] <- sg_RYdoy + which(temp)[1] #Maximum temperature thisSeedlingGrowth_AtBelowTmax <- calculate_SuitableGrowthThisYear_UnderCondition(favorable.conditions=thisSeedlingGrowing & thisYear_SeedlingGrowth_AtBelowTmax, consequences.unfavorable=param$SeedlingGrowth_0StopOr1Resume) temp <- !thisSeedlingGrowth_AtBelowTmax[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueTmax"] <- sg_RYdoy + which(temp)[1] #Updated days of growth or surviving thisSeedlingGrowing <- thisSeedlingGrowing & thisSeedlingGrowth_AbsenceOfSnowCover & thisSeedlingGrowth_AtAboveTmin & thisSeedlingGrowth_AtBelowTmax thisSeedlingLivingButNotGrowing <- !thisSeedlingGrowing - if (sg_RYdoy > 1) thisSeedlingLivingButNotGrowing[seq_len(sg_RYdoy - 1)] <- FALSE #seedling germinated on sg_RYdoy, hence it cannot live before germination day + if (sg_RYdoy > 1) + thisSeedlingLivingButNotGrowing[seq_len(sg_RYdoy - 1)] <- FALSE #seedling germinated on sg_RYdoy, hence it cannot live before germination day #Book-keeping survival under above-ground conditions temp <- thisYear_SeedlingMortality_UnderneathSnowCover[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.UnderneathSnowCover"] <- sg_RYdoy + which(temp)[1] - 1 temp <- thisYear_SeedlingMortality_ByTmin[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByTmin"] <- sg_RYdoy + which(temp)[1] - 1 temp <- thisYear_SeedlingMortality_ByTmax[index.thisSeedlingSeason] - if 
(sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByTmax"] <- sg_RYdoy + which(temp)[1] - 1 #If not killed (yet) then grow and check survival below-ground @@ -5138,7 +5312,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer thisSeedlingGrowing_AgeDays <- seq_len(temp) thisSeedlingGrowing_RootingDepth <- SeedlingRootingDepth(thisSeedlingGrowing_AgeDays, param$Seedling_SoilDepth.PO, param$Seedling_SoilDepth.K, param$Seedling_SoilDepth.r) thisSeedling_thisYear_RootingDepth[thisSeedlingGrowing] <- thisSeedlingGrowing_RootingDepth - if (sum(thisSeedlingLivingButNotGrowing, na.rm = TRUE) > 0) { + if (any(thisSeedlingLivingButNotGrowing, na.rm = TRUE)) { #for days when growth stopped then copy relevant soil depth stopg <- addDepths <- rle(thisSeedlingLivingButNotGrowing) RYDoys_stopg <- c(1, cumsum(stopg$lengths)) @@ -5163,19 +5337,19 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer thisSeedling_thisYear_RootingSoilLayers <- SoilLayer_at_SoilDepth(thisSeedling_thisYear_RootingDepth, layers_depth) #Check survival under chronic SWPMax - thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMax <- get_KilledBySoilLayers(relevantLayers=thisSeedling_thisYear_RootingSoilLayers, kill.conditions=thisYear_SeedlingMortality_ByChronicSWPMax) + thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMax <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, thisYear_SeedlingMortality_ByChronicSWPMax) temp <- thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMax[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByChronicSWPMax"] <- sg_RYdoy + which(temp)[1] - 1 #Check survival under chronic SWPMin - thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMin <- get_KilledBySoilLayers(relevantLayers=thisSeedling_thisYear_RootingSoilLayers, kill.conditions=thisYear_SeedlingMortality_ByChronicSWPMin) + 
thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMin <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, thisYear_SeedlingMortality_ByChronicSWPMin) temp <- thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMin[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByChronicSWPMin"] <- sg_RYdoy + which(temp)[1] - 1 #Check survival under acute SWPMin - thisSeedling_thisYear_SeedlingMortality_ByAcuteSWPMin <- get_KilledBySoilLayers(relevantLayers=thisSeedling_thisYear_RootingSoilLayers, kill.conditions=thisYear_SeedlingMortality_ByAcuteSWPMin) + thisSeedling_thisYear_SeedlingMortality_ByAcuteSWPMin <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, thisYear_SeedlingMortality_ByAcuteSWPMin) temp <- thisSeedling_thisYear_SeedlingMortality_ByAcuteSWPMin[index.thisSeedlingSeason] - if (sum(temp) > 0) + if (any(temp)) killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByAcuteSWPMin"] <- sg_RYdoy + which(temp)[1] - 1 } @@ -5185,11 +5359,13 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer SeedlingMortality_CausesByYear[y, kill.factor] <- SeedlingMortality_CausesByYear[y, kill.factor] + 1 stop.factor <- which.min(stopped_byCauses_onRYdoy) if (any(!is.na(stopped_byCauses_onRYdoy)) && - killed_byCauses_onRYdoy[kill.factor] > stopped_byCauses_onRYdoy[(stop.factor)]) { + killed_byCauses_onRYdoy[kill.factor] > stopped_byCauses_onRYdoy[stop.factor]) { SeedlingMortality_CausesByYear[y, 6+stop.factor] <- SeedlingMortality_CausesByYear[y, 6+stop.factor] + 1 } - SeedlingSurvival_1stSeason[RYyear_ForEachUsedDay == RY.useyrs[y]][sg_RYdoy] <- FALSE + SeedlingSurvival_1stSeason <- setFALSE_SeedlingSurvival_1stSeason( + SeedlingSurvival_1stSeason, RYyear_ForEachUsedDay, + RY.useyrs, y, sg_RYdoy) } } } else {#no germination during this year -> no seedlings to grow or die @@ -5199,13 +5375,17 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, 
i_sw_input_soillayer #---Aggregate output dat_gissm1 <- cbind(Germination_Emergence, SeedlingSurvival_1stSeason) - dat_gissm2 <- cbind(!Germination_AtBelowTmax, !Germination_AtAboveTmin, !Germination_AtMoreThanTopSWPmin, !Germination_DuringFavorableConditions, Germination_RestrictedByTimeToGerminate) + dat_gissm2 <- cbind(!Germination_AtBelowTmax, !Germination_AtAboveTmin, + !Germination_AtMoreThanTopSWPmin, !Germination_DuringFavorableConditions, + Germination_RestrictedByTimeToGerminate) #Fraction of years with success - res1.yr <- aggregate(dat_gissm1, by = list(year_ForEachUsedRYDay), FUN = sum) - stemp <- res1.yr[simTime$index.useyr, -1] > 0 - resMeans[nv:(nv+1)] <- apply(stemp, 2, mean) - resSDs[nv:(nv+1)] <- apply(stemp, 2, sd) + index_RYuseyr <- unique(year_ForEachUsedRYDay) %in% simTime$useyr + res1.yr_v0 <- aggregate(dat_gissm1, by = list(year_ForEachUsedRYDay), FUN = sum) + res1.yr <- res1.yr_v0[index_RYuseyr, -1] + stemp <- res1.yr > 0 + resMeans[nv:(nv+1)] <- apply(stemp, 2, mean, na.rm = TRUE) + resSDs[nv:(nv+1)] <- apply(stemp, 2, sd, na.rm = TRUE) #Periods with no successes rleGerm <- rle(stemp[, 1]) if (any(!rleGerm$values)) @@ -5216,19 +5396,21 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer resMeans[(nv+5):(nv+7)] <- quantile(rleSling$lengths[!rleSling$values], probs = c(0.05, 0.5, 0.95), type = 7) #Mean number of days per year with success - resMeans[(nv+8):(nv+9)] <- apply(res1.yr[simTime$index.useyr, -1], 2, mean) - resSDs[(nv+8):(nv+9)] <- apply(res1.yr[simTime$index.useyr, -1], 2, sd) + resMeans[(nv+8):(nv+9)] <- apply(res1.yr, 2, mean) + resSDs[(nv+8):(nv+9)] <- apply(res1.yr, 2, sd) #Days of year (in normal count) of most frequent successes among years: #toDoy <- function(x) sort(ifelse((temp <- x+Doy_SeedDispersalStart-1) > 365, temp-365,temp)) #convert to normal doys res1.dy <- aggregate(dat_gissm1, by = list(doy_ForEachUsedRYDay), FUN = sum) resMeans[(nv+10):(nv+15)] <- 
get.DoyMostFrequentSuccesses(res1.dy, dat_gissm1) #Mean number of days when germination is restricted due to conditions - res2.yr <- aggregate(dat_gissm2, by = list(year_ForEachUsedRYDay), sum) - resMeans[(nv+16):(nv+20)] <- apply(res2.yr[simTime$index.useyr, -1], 2, mean) - resSDs[(nv+16):(nv+20)] <- apply(res2.yr[simTime$index.useyr, -1], 2, sd) + res2.yr_v0 <- aggregate(dat_gissm2, by = list(year_ForEachUsedRYDay), sum) + res2.yr <- res2.yr_v0[index_RYuseyr, -1] + resMeans[(nv+16):(nv+20)] <- apply(res2.yr, 2, mean) + resSDs[(nv+16):(nv+20)] <- apply(res2.yr, 2, sd) #Mean time to germinate in days - res3.yr <- tapply(Germination_TimeToGerminate, year_ForEachUsedRYDay, mean, na.rm = TRUE) - resMeans[nv+21] <- mean(res3.yr[simTime$index.useyr], na.rm = TRUE) - resSDs[nv+21] <- sd(res3.yr[simTime$index.useyr], na.rm = TRUE) + res3.yr_v0 <- tapply(Germination_TimeToGerminate, year_ForEachUsedRYDay, mean, na.rm = TRUE) + res3.yr <- res3.yr_v0[index_RYuseyr] + resMeans[nv+21] <- mean(res3.yr, na.rm = TRUE) + resSDs[nv+21] <- sd(res3.yr, na.rm = TRUE) #Mean number of days per year of different types of mortalities resMeans[(nv+22):(nv+30)] <- apply(SeedlingMortality_CausesByYear, 2, mean, na.rm = TRUE) #if value==NA, then no germinations that year resSDs[(nv+22):(nv+30)] <- apply(SeedlingMortality_CausesByYear, 2, sd, na.rm = TRUE) #if value==NA, then no germinations that year @@ -5240,7 +5422,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer #Table with data for every year res1.yr.doy <- t(simplify2array(by(dat_gissm1, INDICES=year_ForEachUsedRYDay, FUN=function(x) get.DoyMostFrequentSuccesses(x, dat_gissm1))))[simTime$index.useyr, ] - res.yr <- data.frame(data.frame(res1.yr, res2.yr[, -1], res3.yr)[simTime$index.useyr, ], SeedlingMortality_CausesByYear, res1.yr.doy) + res.yr <- data.frame(data.frame(res1.yr_v0, res2.yr_v0[, -1], res3.yr_v0)[index_RYuseyr, ], SeedlingMortality_CausesByYear, res1.yr.doy) temp.header2 <- 
c("DaysWith_GerminationSuccess", "DaysWith_SeedlingSurvival1stSeason", "Days_GerminationRestrictedByTmax", "Days_GerminationRestrictedByTmin", "Days_GerminationRestrictedBySWPmin", "Days_GerminationRestrictedByAnyCondition", "Days_GerminationRestrictedByTimeToGerminate", "MeanDays_TimeToGerminate", @@ -5303,7 +5485,7 @@ do_OneSite <- function(i_sim, i_labels, i_SWRunInformation, i_sw_input_soillayer } #Daily Output - if(any(simulation_timescales=="daily") && daily_no > 0){ + if(daily_no > 0){ dailyList <- list() SQLc <- "" #aggregate for each response variable @@ -5561,7 +5743,7 @@ if(actionWithSoilWat && runsN_todo > 0){ "create_experimentals", "create_treatments", "daily_lyr_agg", "daily_no", "datafile.windspeedAtHeightAboveGround", "dbOverallColumns", "dbWeatherDataFile", "debug.dump.objects", "DegreeDayBase", "Depth_TopLayers", - "dir.ex.daymet", "dir.ex.maurer2002", "dir.out", "dir.out.temp", + "dir.code", "dir.ex.daymet", "dir.ex.maurer2002", "dir.out", "dir.out.temp", "dir.prj", "dir.sw.in.tr", "dir.sw.runs", "dirname.sw.runs.weather", "do_OneSite", "do.GetClimateMeans", "done_prior", "endyr", "estabin", "eta.estimate", "exec_c_prefix", "ExpInput_Seperator", "expN", "filebasename", @@ -5603,7 +5785,7 @@ if(actionWithSoilWat && runsN_todo > 0){ #ETA calculation if (!be.quiet) - print(paste("SWSF simulation runs:", runsN_todo, "out of", runsN_total, " runs will be carried out on", workersN, "cores: started at", t1 <- Sys.time())) + print(paste("SWSF simulation runs:", runsN_todo, "out of", runsN_total, "runs will be carried out on", workersN, "cores: started at", t1 <- Sys.time())) inputDataToSave <- list() @@ -5616,6 +5798,7 @@ if(actionWithSoilWat && runsN_todo > 0){ mpi.bcast.cmd(library(RSQLite, quietly = TRUE)) export_objects_to_workers(list.export, list_envs, "mpi") + mpi.bcast.cmd(source(file.path(dir.code, "SWSF_cpp_functions.R"))) if (print.debug) { mpi.bcast.cmd(print(paste("Slave", mpi.comm.rank(), "has", length(ls()), "objects"))) } @@ -5721,6 
+5904,7 @@ tryCatch({ snow::clusterEvalQ(cl, library(RSQLite, quietly = TRUE)) export_objects_to_workers(list.export, list_envs, "snow", cl) + snow::clusterEvalQ(cl, source(file.path(dir.code, "SWSF_cpp_functions.R"))) snow::clusterEvalQ(cl, Rsoilwat31::dbW_setConnection(dbFilePath = dbWeatherDataFile)) runs.completed <- foreach(i_sim=runIDs_todo, .combine="+", .inorder=FALSE) %dopar% { @@ -5734,6 +5918,7 @@ tryCatch({ } if (identical(parallel_backend, "multicore")) { + source(file.path(dir.code, "SWSF_cpp_functions.R")) Rsoilwat31::dbW_setConnection(dbFilePath = dbWeatherDataFile) runs.completed <- foreach(i_sim=runIDs_todo, .combine="+", .inorder=FALSE, .noexport=list.noexport) %dopar% { @@ -5746,6 +5931,7 @@ tryCatch({ } } else { #call the simulations in serial + source(file.path(dir.code, "SWSF_cpp_functions.R")) Rsoilwat31::dbW_setConnection(dbFilePath = dbWeatherDataFile) runs.completed <- 0 diff --git a/R/2_SWSF_p5of5_Functions_v51.R b/R/2_SWSF_p5of5_Functions_v51.R index e1f40b69..011679a7 100644 --- a/R/2_SWSF_p5of5_Functions_v51.R +++ b/R/2_SWSF_p5of5_Functions_v51.R @@ -14,19 +14,19 @@ tol <- sqrt(.Machine$double.eps) toln <- sqrt(.Machine$double.neg.eps) #------ Funtions -swsf_read_csv <- compiler::cmpfun(function(file, ...) { +swsf_read_csv <- compiler::cmpfun(function(file, stringsAsFactors = FALSE, ...) { if (requireNamespace("iotools", quietly = TRUE)) { # faster than utils::read.csv temp <- try(iotools::read.csv.raw(file = file, ...), silent = TRUE) if (inherits(temp, "try-error")) { - read.csv(file = file, ...) + read.csv(file = file, stringsAsFactors = stringsAsFactors, ...) } else { names(temp) <- gsub("\"", "", names(temp)) temp } } else { - read.csv(file = file, ...) + read.csv(file = file, stringsAsFactors = stringsAsFactors, ...) 
} }) @@ -470,6 +470,28 @@ create_filename_for_Maurer2002_NorthAmerica <- compiler::cmpfun(function(X_WGS84 gsub("[[:space:]]", "", paste("data", formatC(28.8125+round((Y_WGS84-28.8125)/0.125,0)*0.125, digits=4, format="f"), formatC(28.8125+round((X_WGS84-28.8125)/0.125,0)*0.125, digits=4, format="f"), sep="_")) }) +#' @examples +#' month1 <- function() as.POSIXlt(seq(from = ISOdate(1980, 1, 1, tz = "UTC"), +#' to = ISOdate(2010, 12, 31, tz = "UTC"), by = "1 day"))$mon + 1 +#' month2 <- function() seq_month_ofeach_day(list(1980, 1, 1), +#' list(2010, 12, 31), tz = "UTC") +#' +#' if (requireNamespace("microbenchmark", quietly = TRUE)) +#' microbenchmark::microbenchmark(month1(), month2()) # barely any difference +#' +seq_month_ofeach_day <- compiler::cmpfun(function(from = list(year = 1900, month = 1, day = 1), + to = list(year = 1900, month = 12, day = 31), tz = "UTC") { + + x <- paste(from[[1]], from[[2]], from[[3]], 12, 0, 0, sep = "-") + from0 <- unclass(as.POSIXct.POSIXlt(strptime(x, "%Y-%m-%d-%H-%M-%OS", tz = tz))) + x <- paste(to[[1]], to[[2]], to[[3]], 12, 0, 0, sep = "-") + to0 <- unclass(as.POSIXct.POSIXlt(strptime(x, "%Y-%m-%d-%H-%M-%OS", tz = tz))) + + res <- seq.int(0, to0 - from0, by = 86400) + from0 + as.POSIXlt.POSIXct(.POSIXct(res, tz = tz))$mon + 1 +}) + + simTiming <- compiler::cmpfun(function(startyr, simstartyr, endyr) { res <- list() #simyrs <- simstartyr:endyr @@ -543,6 +565,13 @@ simTiming_ForEachUsedTimeUnit <- compiler::cmpfun(function(st, sim_tscales, lati #------auxiliary functions + +in_box <- compiler::cmpfun(function(xy, xbounds, ybounds, i_use) { + !i_use & + xy[, 1] >= xbounds[1] & xy[, 1] <= xbounds[2] & + xy[, 2] >= ybounds[1] & xy[, 2] <= ybounds[2] +}) + adjustLayersDepth <- compiler::cmpfun(function(layers_depth, d) round(layers_depth[seq_len(d)])) #The wrapper only handles 1-cm resolution of soil depths (maily because of the trco) getLayersWidth <- compiler::cmpfun(function(layers_depth) diff(c(0, layers_depth))) 
setLayerSequence <- compiler::cmpfun(function(d) seq_len(d)) @@ -593,59 +622,41 @@ sw_dailyC4_TempVar <- compiler::cmpfun(function(dailyTempMin, dailyTempMean, sim }) sw_SiteClimate_Ambient <- compiler::cmpfun(function(weatherList, year.start, year.end, do.C4vars = FALSE, simTime = NULL, simTime2 = NULL) { - sw.weather.suffix <- as.numeric(names(weatherList)) - itemp <- year.start <= sw.weather.suffix & year.end >= sw.weather.suffix - years <- sw.weather.suffix[itemp] - - tempMean <- tempMin <- tempMax <- ppt <- rep(0, times = 12) - mat <- NULL - #map <- NULL - if (do.C4vars) { - dailyTempMin <- dailyTempMean <- NULL - } - - no.yrs <- length(years) - if (no.yrs > 0) for (y in seq_len(no.yrs)) { - x <- Rsoilwat31::get_swWeatherData(weatherList, years[y])@data[, c("Tmax_C", "Tmin_C", "PPT_cm"), drop = FALSE] - temp.dailyTempMean <- rowMeans(x[, c("Tmax_C", "Tmin_C")]) - - if (do.C4vars) { - dailyTempMin <- c(dailyTempMin, x[, "Tmin_C"]) - dailyTempMean <- c(dailyTempMean, temp.dailyTempMean) - } - - month_forEachDoy <- as.POSIXlt(seq(from = ISOdate(years[y], 1, 1, tz = "UTC"), - to = ISOdate(years[y], 12, 31, tz = "UTC"), - by = "1 day"))$mon + 1 - - tempMean <- tempMean + tapply(temp.dailyTempMean, month_forEachDoy, mean) - tempMin <- tempMin + tapply(x[, "Tmin_C"], month_forEachDoy, mean) - tempMax <- tempMax + tapply(x[, "Tmax_C"], month_forEachDoy, mean) - mat <- c(mat, mean(temp.dailyTempMean)) - - ppt <- ppt + tapply(x[, "PPT_cm"], month_forEachDoy, sum) - #map <- c(map, sum(x[, "PPT_cm"])) - } - - res <- list() - res[["meanMonthlyTempC"]] <- tempMean / no.yrs - res[["minMonthlyTempC"]] <- tempMin / no.yrs - res[["maxMonthlyTempC"]] <- tempMax / no.yrs - res[["meanMonthlyPPTcm"]] <- ppt / no.yrs - - res[["MAP_cm"]] <- sum(res[["meanMonthlyPPTcm"]]) # sum(res[["meanMonthlyPPTcm"]]) == mean(map) - res[["MAT_C"]] <- mean(mat) - - if (do.C4vars) { - res[["dailyTempMin"]] <- dailyTempMin - res[["dailyTempMean"]] <- dailyTempMean - res[["dailyC4vars"]] <- 
sw_dailyC4_TempVar(dailyTempMin, dailyTempMean, simTime, simTime2) - - } else { - res[["dailyTempMin"]] <- res[["dailyTempMean"]] <- res[["dailyC4vars"]] <- NA - } - - res + x <- Rsoilwat31::dbW_weatherData_to_dataframe(weatherList) + + # Trim to years + years <- as.numeric(unlist(lapply(weatherList, function(x) x@year))) + years <- years[year.start <= years & year.end >= years] + + x <- x[year.start <= x[, "Year"] & year.end >= x[, "Year"], ] + xl <- list( + months = as.POSIXlt(seq(from = ISOdate(years[1], 1, 1, tz = "UTC"), + to = ISOdate(years[length(years)], 12, 31, tz = "UTC"), + by = "1 day"))$mon + 1, + Tmean_C = rowMeans(x[, c("Tmax_C", "Tmin_C")]) + ) + + index <- xl[["months"]] + 100 * x[, "Year"] + temp <- vapply(list(xl[["Tmean_C"]], x[, "Tmin_C"], x[, "Tmax_C"]), function(data) + matrix(tapply(data, index, mean), nrow = 12), + FUN.VALUE = matrix(NA_real_, nrow = 12, ncol = length(years))) + tempPPT <- matrix(tapply(x[, "PPT_cm"], index, sum), nrow = 12) + + list( + meanMonthlyTempC = apply(temp[, , 1], 1, mean), + minMonthlyTempC = apply(temp[, , 2], 1, mean), + maxMonthlyTempC = apply(temp[, , 3], 1, mean), + meanMonthlyPPTcm = apply(tempPPT, 1, mean), + + MAP_cm = sum(tempPPT) / length(years), + MAT_C = mean(xl[["Tmean_C"]]), + + dailyTempMin = if (do.C4vars) x[, "Tmin_C"] else NA, + dailyTempMean = if (do.C4vars) xl[["Tmean_C"]] else NA, + dailyC4vars = if (do.C4vars) { + sw_dailyC4_TempVar(dailyTempMin = x[, "Tmin_C"], dailyTempMean = xl[["Tmean_C"]], simTime, simTime2) + } else NA + ) }) cut0Inf <- compiler::cmpfun(function(x, val = NA) { @@ -1787,15 +1798,15 @@ get.LookupFromTable <- compiler::cmpfun(function(pattern, trtype, tr_input, sw_i fill_empty <- compiler::cmpfun(function(data, pattern, fill, tol = tol) { stopifnot(names(data) %in% c("sw_input", "sw_input_use")) - icols <- sapply(data, function(x) grep(pattern, colnames(x))) - stopifnot(dim(icols)[2L] == 2L) + icols <- sapply(data, function(x) grep(pattern, names(x))) + 
stopifnot(identical(icols[, "sw_input_use"], icols[, "sw_input"])) + icols <- icols[, "sw_input_use"] - for (k in seq_len(dim(icols)[1L])) { - ic <- icols[k, "sw_input"] - iempty <- is.na(data$sw_input[, ic]) | abs(data$sw_input[, ic]) < tol + for (k in icols) { + iempty <- is.na(data$sw_input[, k]) if (any(iempty)) { - data$sw_input[iempty, ic] <- fill - data$sw_input_use[icols[k, "sw_input_use"]] <- TRUE + data$sw_input[iempty, k] <- fill + data$sw_input_use[k] <- TRUE } } @@ -1857,7 +1868,7 @@ identify_soillayers <- compiler::cmpfun(function(depths, sdepth) { it <- findInterval(depths, sdepth) if (anyNA(it)) { as.integer(na.exclude(it)) - } else if (diff(it) > 0) { + } else if (length(it) > 1 && diff(it) > 0) { (1 + it[1]):(it[2]) } else { it[1] @@ -2971,26 +2982,28 @@ calculate_DurationFavorableConditions <- compiler::cmpfun(function(RYyear, conse get_modifiedHardegree2006NLR <- compiler::cmpfun(function(RYdoy, Estimate_TimeToGerminate, TmeanJan, a, b, c, d, k1_meanJanTemp, k2_meanJanTempXIncubationTemp, k3_IncubationSWP, Tgerm.year, SWPgerm.year, durations, rec.delta = 1, nrec.max = 10L) { for (nrec in seq_len(nrec.max)) { - Estimate_TimeToGerminate <- Estimate_TimeToGerminate.oldEstimate <- max(0, round(Estimate_TimeToGerminate, 0)) + Estimate_TimeToGerminate <- Estimate_TimeToGerminate.oldEstimate <- max(0, round(Estimate_TimeToGerminate)) - Tgerm <- mean(Tgerm.year[RYdoy:(RYdoy + Estimate_TimeToGerminate - 1)], na.rm = TRUE) - SWPgerm <- mean(SWPgerm.year[RYdoy:(RYdoy + Estimate_TimeToGerminate - 1)], na.rm = TRUE) + ids <- RYdoy:(RYdoy + Estimate_TimeToGerminate - 1) + Tgerm <- mean(Tgerm.year[ids], na.rm = TRUE) + SWPgerm <- mean(SWPgerm.year[ids], na.rm = TRUE) temp.c.lim <- -(Tgerm - b) * (d^2 - 1) / d c <- if (c > 0) { - ifelse(c > temp.c.lim, c, temp.c.lim + tol) + if (c > temp.c.lim) c else {temp.c.lim + tol} } else if (c < 0) { - ifelse(c < temp.c.lim, c, temp.c.lim - tol) + if (c < temp.c.lim) c else {temp.c.lim - tol} } #NLR model (eq.5) in 
Hardegree SP (2006) Predicting Germination Response to Temperature. I. Cardinal-temperature Models and Subpopulation-specific Regression. Annals of Botany, 97, 1115-1125. - temp <- a * exp(-log(2)/log(d)^2 * log(1 + (Tgerm - b)*(d^2 - 1)/(c * d))^2) + temp <- a * exp(-0.693147181 / log(d)^2 * log(1 + (Tgerm - b) * (d^2 - 1) / (c * d))^2) # all.equal(log(2), 0.693147181) + #drs addition to time to germinate dependent on mean January temperature and soil water potential temp <- 1 / temp + k1_meanJanTemp * TmeanJan + k2_meanJanTempXIncubationTemp * TmeanJan * Tgerm + k3_IncubationSWP * SWPgerm - Estimate_TimeToGerminate <- max(1, round(temp, 0) ) + Estimate_TimeToGerminate <- max(1, round(temp) ) #break if convergence or not enough time in this year if (abs(Estimate_TimeToGerminate - Estimate_TimeToGerminate.oldEstimate) <= rec.delta | @@ -3004,7 +3017,7 @@ get_modifiedHardegree2006NLR <- compiler::cmpfun(function(RYdoy, Estimate_TimeTo Estimate_TimeToGerminate } - ifelse(out <= durations[RYdoy] & RYdoy + out <= 365, out, NA) #test whether enough time to germinate + if (out <= durations[RYdoy] & RYdoy + out <= 365) out else NA #test whether enough time to germinate }) #' Function to estimate time to germinate for each day of a given year and conditions (temperature, top soil SWP) @@ -3012,9 +3025,6 @@ calculate_TimeToGerminate_modifiedHardegree2006NLR <- compiler::cmpfun(function( #values for current year index.year <- RYyear_ForEachUsedDay == RYyear conditions <- Germination_DuringFavorableConditions[index.year] - doys.padded <- seq_len(sum(index.year)) - doys.favorable <- doys.padded[conditions] - doys.padded[!conditions] <- NA # determining time to germinate for every day a <- max(tol, param$Hardegree_a) @@ -3026,20 +3036,21 @@ calculate_TimeToGerminate_modifiedHardegree2006NLR <- compiler::cmpfun(function( }) temp.c <- if (param$Hardegree_c != 0) param$Hardegree_c else sign(runif(1) - 0.5) * tol - TimeToGerminate.favorable <- sapply(doys.favorable, FUN = 
get_modifiedHardegree2006NLR, + TimeToGerminate.favorable <- unlist(lapply(which(conditions), get_modifiedHardegree2006NLR, Estimate_TimeToGerminate = 1, TmeanJan = TmeanJan, a = a, b = b, c = temp.c, d = d, k1_meanJanTemp = param$TimeToGerminate_k1_meanJanTemp, k2_meanJanTempXIncubationTemp = param$TimeToGerminate_k2_meanJanTempXIncubationTemp, k3_IncubationSWP = param$TimeToGerminate_k3_IncubationSWP, Tgerm.year = soilTmeanSnow[index.year], SWPgerm.year = swp.TopMean[index.year], - durations = LengthDays_FavorableConditions[index.year]) #consequences of unfavorable conditions coded in here + durations = LengthDays_FavorableConditions[index.year])) #consequences of unfavorable conditions coded in here - if (length(TimeToGerminate.favorable) == 0) { - TimeToGerminate.favorable <- vector("numeric", length = 0) + res <- rep(NA, length(conditions)) + if (length(TimeToGerminate.favorable) > 0) { + res[conditions] <- TimeToGerminate.favorable } - napredict(na.action(na.exclude(doys.padded)), TimeToGerminate.favorable) + res }) do.vector <- compiler::cmpfun(function(kill.vector, max.duration.before.kill) { @@ -3109,19 +3120,6 @@ SeedlingRootingDepth <- compiler::cmpfun(function(age, P0, K, r) { }) -#' Function that checks whether all relevant (those with roots) soil layers are under conditions of mortality (kill.conditions) for each day of a given year -get_KilledBySoilLayers <- compiler::cmpfun(function(relevantLayers, kill.conditions) { - temp <- cbind(relevantLayers, kill.conditions) - - apply(temp, 1, function(x) { - if (!is.na(x[1])) { - all(as.logical(x[2:(2 + x[1] - 1)])) - } else { - NA - } - }) -}) - get.DoyAtLevel <- compiler::cmpfun(function(x, level) { which(x == level & x > 0) }) diff --git a/R/SWSF_cpp_functions.R b/R/SWSF_cpp_functions.R new file mode 100644 index 00000000..abf5702f --- /dev/null +++ b/R/SWSF_cpp_functions.R @@ -0,0 +1,84 @@ +#------ Remove when this becomes a R package + + +######################## +#------ GISSM functions +# Schlaepfer, 
D.R., Lauenroth, W.K. & Bradford, J.B. (2014). Modeling regeneration responses of big sagebrush (Artemisia tridentata) to abiotic conditions. Ecol Model, 286, 66-77. + + +#' Determine if all conditions across rooted soil layers are deadly +#' +#' Function that checks whether all relevant (those with roots) soil layers are under conditions of mortality (kill.conditions) for each day of a given year +#' +#' \code{relevantLayers} takes either \code{NA} if no soil layers should be considered +#' (e.g., because not yet germinated), or an integer number between 1 and the number of +#' simulated soil layers. The number indicates the depth to which a seedling has grown +#' roots and over which layers \code{kill.conditions} will be evaluated. +#' +#' @section Note: The Rcpp version of the function is about 165x faster than the version +#' previous to commit 6344857a9cdb08acf68fa031c43cf4a596613aad 'Small speed improvements' +#' and about 70x faster than the R version. The Rcpp version also reduced the memory +#' footprint by a factor of 200. +#' +#' @param relevantLayers An integer vector, usually of length 365 or 366 (days). +#' @param kill.conditions A m x p logical matrix with \code{m >= length(relevantLayers)} +#' and p represents the number of simulated soil layers, i.e., +#' \code{p >= max(relevantLayers, na.rm = TRUE)}. 
+#' +#' @return A logical vector of the length of \code{relevantLayers} with values containing +#' \code{NA} for days when conditions were not evaluated, \code{TRUE} if all +#' relevant soil layers (columns) of \code{kill.conditions} were \code{TRUE}, and with +#' \code{FALSE} otherwise +if (requireNamespace("Rcpp")) { + Rcpp::sourceCpp(file.path(dir.code, "..", "src", "GISSM_get_KilledBySoilLayers.cpp")) + +} else { + get_KilledBySoilLayers <- compiler::cmpfun(function(relevantLayers, kill.conditions) { + vapply(seq_along(relevantLayers), function(k) + all(as.logical(kill.conditions[k, if (is.finite(relevantLayers[k])) seq_len(relevantLayers[k]) else NA, drop = FALSE])), + FUN.VALUE = NA) + }) +} + + +#' @section Note: The Rcpp version of the function is about 270x faster for vectors of +#' length 365 and 12,000x faster for vectors of length 11,000 than the R version. +#' The Rcpp version also reduced the memory footprint by a factor of >> 3080. +if (requireNamespace("Rcpp")) { + Rcpp::sourceCpp(file.path(dir.code, "..", "src", "GISSM_germination_wait_times.cpp")) + +} else { + germination_wait_times <- compiler::cmpfun(function(time_to_germinate, duration_fave_cond) { + N <- length(time_to_germinate) + na.exclude(unlist(lapply(seq_len(N), function(t) { + if (is.finite(time_to_germinate[t])) { + t1 <- duration_fave_cond[t:N] + t2 <- na.exclude(t1) + t3 <- which(t2[time_to_germinate[t]] == t1)[1] + sum(is.na(t1[1:t3])) + } else { + NA + } + }))) + }) +} + + +#' @section Note: The Rcpp version of the function is about 4x faster than the R version. +#' The Rcpp version also reduced the memory footprint by a factor of 4. 
+if (requireNamespace("Rcpp")) { + Rcpp::sourceCpp(file.path(dir.code, "..", "src", "GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp")) + +} else { + setFALSE_SeedlingSurvival_1stSeason <- compiler::cmpfun(function(ss1s, ry_year_day, ry_useyrs, y, doy) { + ss1s[ry_year_day == ry_useyrs[y]][doy] <- FALSE + + ss1s + }) +} + + + + +#------ End of GISSM functions +######################## diff --git a/src/GISSM_germination_wait_times.cpp b/src/GISSM_germination_wait_times.cpp new file mode 100644 index 00000000..d55f82fe --- /dev/null +++ b/src/GISSM_germination_wait_times.cpp @@ -0,0 +1,39 @@ +#include +using namespace Rcpp; + +// [[Rcpp::export]] +IntegerVector germination_wait_times(const IntegerVector& time_to_germinate, + const IntegerVector& duration_fave_cond) { + int n = time_to_germinate.size(); + + // throw input errors + if (n != duration_fave_cond.size()) { + throw std::invalid_argument("'germination_wait_times': arguments must be of identical length"); + } + + // calculate + int i, j = 0, t1, n_nas; + int k = sum(!is_na(time_to_germinate)); + IntegerVector out(k); + + for (i = 0; i < n; ++i) { + + if (!IntegerVector::is_na(time_to_germinate[i])) { + // throw error if germination takes too long + if (IntegerVector::is_na(duration_fave_cond[i]) || + time_to_germinate[i] > duration_fave_cond[i]) { + throw std::runtime_error("'germination_wait_times': values of time_to_germinate are larger than those of duration_fave_cond (or the latter are NAs)"); + } + + n_nas = 0; + // count NAs between i and i + time_to_germinate[i] + for (t1 = 0; t1 - n_nas < time_to_germinate[i] && i + t1 < n; ++t1) { + if (IntegerVector::is_na(duration_fave_cond[i + t1])) ++n_nas; + } + + out[j++] = n_nas; + } + } + + return Rcpp::wrap(out); +} diff --git a/src/GISSM_get_KilledBySoilLayers.cpp b/src/GISSM_get_KilledBySoilLayers.cpp new file mode 100644 index 00000000..21453ab9 --- /dev/null +++ b/src/GISSM_get_KilledBySoilLayers.cpp @@ -0,0 +1,30 @@ +#include +using namespace Rcpp; + +// 
[[Rcpp::export]] +LogicalVector get_KilledBySoilLayers(const IntegerVector& relevantLayers, const LogicalMatrix& kill_conditions) { + int n = relevantLayers.size(); + + // catch errors + if (max(relevantLayers) > kill_conditions.ncol() || + n > kill_conditions.nrow() || + is_true(any(relevantLayers < 0))) { + throw std::invalid_argument("'get_KilledBySoilLayers': inadmissible value(s) of relevantLayers"); + } + + // calculate + int i, j; + IntegerVector killed(n); + for (i = 0; i < n; i++) { + if (IntegerVector::is_na(relevantLayers[i])) { + killed[i] = NA_INTEGER; + + } else { + for (j = 0; j < relevantLayers[i] && kill_conditions(i, j); ++j) ; + + killed[i] = (j == relevantLayers[i] && kill_conditions(i, j - 1) ? 1 : 0); + } + } + + return Rcpp::wrap(killed); +} diff --git a/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp b/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp new file mode 100644 index 00000000..e5701140 --- /dev/null +++ b/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp @@ -0,0 +1,30 @@ +#include +using namespace Rcpp; + +// Note: ss1s is a pointer to the data and the original vector will get altered; one would need for a deep copy: LogicalVector out = clone(ss1s) + +// [[Rcpp::export]] +LogicalVector setFALSE_SeedlingSurvival_1stSeason(LogicalVector& ss1s, + const IntegerVector& ry_year_day, const IntegerVector& ry_useyrs, int y, int doy) { + int i, n = ry_year_day.size(); + + // throw input errors + if (n != ss1s.size() || ry_useyrs.size() < y || + ry_useyrs[y - 1] > max(ry_year_day) || ry_useyrs[y - 1] < min(ry_year_day)) { + throw std::invalid_argument("'setFALSE_SeedlingSurvival_1stSeason': invalid arguments."); + } + + // calculate + for (i = 0; i < n && ry_year_day[i] != ry_useyrs[y - 1]; ++i); // y is a 1-based index to ry_useyrs + + // throw error + if (i + doy > n) { + throw std::runtime_error("'setFALSE_SeedlingSurvival_1stSeason': doy too large for given year 'y'"); + } + + // assumes increasingly sorted vector ry_year_day + // doy 
is a 1-based index + ss1s[i + doy - 1] = false; + + return Rcpp::wrap(ss1s); +} diff --git a/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.6.4.sqlite3 b/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.6.4.sqlite3 new file mode 100644 index 00000000..b020d04d Binary files /dev/null and b/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.6.4.sqlite3 differ diff --git a/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.8.2.sqlite3 b/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.8.2.sqlite3 new file mode 100644 index 00000000..d9e0025c Binary files /dev/null and b/tests/Test_projects/0_ReferenceOutput/dbTables_Test4_AllOverallAggregations_snow_v1.8.2.sqlite3 differ diff --git a/tests/Test_projects/Functions_for_test_projects.R b/tests/Test_projects/Functions_for_test_projects.R index 5c4ed213..ab139292 100644 --- a/tests/Test_projects/Functions_for_test_projects.R +++ b/tests/Test_projects/Functions_for_test_projects.R @@ -309,10 +309,11 @@ compare_test_output <- function(dir_test, dir_ref = NULL, #---Compare field data and report if differences were found ident <- all.equal(x_ref, x_test, tol = tol, scale = if (comp_absolute) 1 else NULL) - if (!isTRUE(ident)) + if (!isTRUE(ident)) { temp <- list(ident) names(temp) <- tocomp_tables[k] diff_msgs <- c(diff_msgs, temp) + } } diff_msgs diff --git a/tests/Test_projects/Run_all_test_projects.R b/tests/Test_projects/Run_all_test_projects.R index 87f55252..40a6008e 100644 --- a/tests/Test_projects/Run_all_test_projects.R +++ b/tests/Test_projects/Run_all_test_projects.R @@ -93,7 +93,8 @@ temp <- if (interactive()) { readline(paste("Which of the", length(tests), "tests should be run", - "('all'; a single number; several numbers separated by commas): ")) + "('all'; a single number; several numbers separated by commas;", + "zero or a negative number to 
delete any temporary objects): ")) } else which_tests_torun which_tests_torun <- if (!is.na(temp)) { @@ -101,7 +102,11 @@ which_tests_torun <- if (!is.na(temp)) { seq_along(tests) } else { temp <- unique(as.integer(strsplit(gsub("[[:space:]]", "", temp), ",")[[1]])) - intersect(temp, seq_along(tests)) + if (all(temp < 1)) { + -1 + } else { + intersect(temp, seq_along(tests)) + } } } else { seq_along(tests) @@ -113,10 +118,16 @@ source(file.path(dir.test, "Functions_for_test_projects.R"), keep.source = FALSE #---Run projects -out <- run_test_projects(dir.test, tests, dir.old, which_tests_torun, - delete_output, force_delete_output, make_new_ref) +if (any(which_tests_torun > 0)) { + out <- run_test_projects(dir.test, tests, dir.old, which_tests_torun, + delete_output, force_delete_output, make_new_ref) + print(out) + +} else if (which_tests_torun < 1) { + print(paste0(Sys.time(), ": delete temporary disk files of SWSF test projects")) + lapply(tests, delete_test_output) +} -print(out) setwd(dir.old) print(paste0(Sys.time(), ": end of SWSF test projects")) diff --git a/tests/Test_projects/Test1_downscaling_overhaul/2_SWSF_p1of5_Settings_v51.R b/tests/Test_projects/Test1_downscaling_overhaul/2_SWSF_p1of5_Settings_v51.R index 2584d66e..11109d5a 100644 --- a/tests/Test_projects/Test1_downscaling_overhaul/2_SWSF_p1of5_Settings_v51.R +++ b/tests/Test_projects/Test1_downscaling_overhaul/2_SWSF_p1of5_Settings_v51.R @@ -189,16 +189,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories - #CMIP3 - 
"ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 1, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 1, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 1, @@ -219,6 +211,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... 
if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -252,7 +258,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -265,23 +271,23 @@ climate.conditions <- c(climate.ambient, "RCP45.CanESM2", "RCP45.CESM1-CAM5", "R #Downscaling method: monthly scenario -> daily forcing variables #Will be applied to each climate.conditions -downscaling.method <- c("hybrid-delta-3mod") #one or multiple of 
"raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 2010), or "hybrid-delta-3mod" - -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +downscaling.method <- c("raw", "delta", "hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 2010), or "hybrid-delta-3mod" + +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." 
+ # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #Climate ensembles created across scenarios @@ -373,6 +379,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 0, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 0, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 0, "dailyWetDegreeDays", 1, "dailyThermalDrynessStartEnd", 1, "dailyThermalSWPConditionCount", 1, diff --git a/tests/Test_projects/Test2_LookupWeatherFolders/2_SWSF_p1of5_Settings_v51.R b/tests/Test_projects/Test2_LookupWeatherFolders/2_SWSF_p1of5_Settings_v51.R index 3d61cc6c..fe0beb89 100644 --- a/tests/Test_projects/Test2_LookupWeatherFolders/2_SWSF_p1of5_Settings_v51.R +++ b/tests/Test_projects/Test2_LookupWeatherFolders/2_SWSF_p1of5_Settings_v51.R @@ -189,16 +189,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories - #CMIP3 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - 
"ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 1, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 1, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 0, @@ -219,6 +211,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... 
if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -252,7 +258,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -265,23 +271,23 @@ climate.conditions <- c(climate.ambient, "RCP45.CanESM2", "RCP45.CESM1-CAM5", "R #Downscaling method: monthly scenario -> daily forcing variables #Will be applied to each climate.conditions -downscaling.method <- c("hybrid-delta-3mod") #one or multiple of 
"raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 2010), or "hybrid-delta-3mod" - -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +downscaling.method <- c("raw", "delta", "hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 2010), or "hybrid-delta-3mod" + +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." 
+ # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #Climate ensembles created across scenarios @@ -373,6 +379,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 0, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 0, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 0, "dailyWetDegreeDays", 1, "dailyThermalDrynessStartEnd", 1, "dailyThermalSWPConditionCount", 1, diff --git a/tests/Test_projects/Test3_OnlyMeanDailyOutput/2_SWSF_p1of5_Settings_v51.R b/tests/Test_projects/Test3_OnlyMeanDailyOutput/2_SWSF_p1of5_Settings_v51.R index 0ed01e99..a703fd65 100644 --- a/tests/Test_projects/Test3_OnlyMeanDailyOutput/2_SWSF_p1of5_Settings_v51.R +++ b/tests/Test_projects/Test3_OnlyMeanDailyOutput/2_SWSF_p1of5_Settings_v51.R @@ -189,16 +189,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories - #CMIP3 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - 
"ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 0, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 0, @@ -219,6 +211,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... 
if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -252,7 +258,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -267,21 +273,21 @@ climate.conditions <- c(climate.ambient, "RCP45.CanESM2", "RCP45.CESM1-CAM5", "R #Will be applied to each climate.conditions downscaling.method <- c("hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 
2010), or "hybrid-delta-3mod" -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." 
+ # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #Climate ensembles created across scenarios @@ -373,6 +379,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 0, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 0, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 0, "dailyWetDegreeDays", 0, "dailyThermalDrynessStartEnd", 0, "dailyThermalSWPConditionCount", 0, diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputData_SoilLayers_v9.csv b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputData_SoilLayers_v9.csv index cfde893b..09735046 100644 --- a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputData_SoilLayers_v9.csv +++ b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputData_SoilLayers_v9.csv @@ -1,6 +1,7 @@ -"Label","SoilDepth_cm","depth_L1","depth_L2","depth_L3","depth_L4","depth_L5","depth_L6","depth_L7","depth_L8","depth_L9","depth_L10","depth_L11","depth_L12","depth_L13","depth_L14","depth_L15","depth_L16","depth_L17","depth_L18","depth_L19","depth_L20" -"Site01",91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site02",91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site03",91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site04",91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site04",91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Label,SoilDepth_cm,depth_L1,depth_L2,depth_L3,depth_L4,depth_L5,depth_L6,depth_L7,depth_L8,depth_L9,depth_L10,depth_L11,depth_L12,depth_L13,depth_L14,depth_L15,depth_L16,depth_L17,depth_L18,depth_L19,depth_L20 +Site01,91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site02,91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site03,91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site04,91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site05,91,10,20,40,60,80,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site06,3,10,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputMaster_Test_v11.csv b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputMaster_Test_v11.csv index 2a5ad4a6..d216f7b9 100644 --- a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputMaster_Test_v11.csv +++ b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/SWRuns_InputMaster_Test_v11.csv @@ -1,6 +1,7 @@ -"Label","site_id","WeatherFolder","X_WGS84","Y_WGS84","ELEV_m","Include_YN","dailyweather_source","GCM_sources","Include_YN_ClimateScenarioSources","SoilTexture_source","Include_YN_SoilSources","Elevation_source","Include_YN_ElevationSources","ClimateNormals_source","Include_YN_ClimateNormalSources" -"Site01",1,"Site01_weatherdata",-106.29953,35.76555,2072,1,"LookupWeatherFolder","CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site02",2,"Site02_weatherdata",-106.27478,35.76451,2072,1,"LookupWeatherFolder","CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site03",3,"Site03_weatherdata",-106.28126,35.7799,2072,1,"LookupWeatherFolder","CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 
-"Site04",4,NA,-106.28749,35.7953,2072,0,NA,NA,0,NA,0,NA,0,NA,0 -"Site05",5,"Site05_weatherdata",-106.28749,35.7953,2072,1,"LookupWeatherFolder","CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 +Label,site_id,WeatherFolder,X_WGS84,Y_WGS84,ELEV_m,Include_YN,dailyweather_source,GCM_sources,Include_YN_ClimateScenarioSources,SoilTexture_source,Include_YN_SoilSources,Elevation_source,Include_YN_ElevationSources,ClimateNormals_source,Include_YN_ClimateNormalSources +Site01,1,Site01_weatherdata,-106.29953,35.76555,2072,1,LookupWeatherFolder,CMIP5_BCSD_GDODCPUCLLNL_USA,1,ISRICWISEv12_Global,1,Elevation_NED_USA,1,ClimateNormals_NCDC2005_USA,1 +Site02,2,Site02_weatherdata,-106.27478,35.76451,2072,1,LookupWeatherFolder,CMIP5_BCSD_GDODCPUCLLNL_USA,1,ISRICWISEv12_Global,1,Elevation_NED_USA,1,ClimateNormals_NCDC2005_USA,1 +Site03,3,Site03_weatherdata,-106.28126,35.7799,2072,1,LookupWeatherFolder,CMIP5_BCSD_GDODCPUCLLNL_USA,1,ISRICWISEv12_Global,1,Elevation_NED_USA,1,ClimateNormals_NCDC2005_USA,1 +Site04,4,NA,-106.28749,35.7953,2072,0,NA,NA,0,NA,0,NA,0,NA,0 +Site05,5,Site05_weatherdata,-106.28749,35.7953,2072,1,LookupWeatherFolder,CMIP5_BCSD_GDODCPUCLLNL_USA,1,ISRICWISEv12_Global,1,Elevation_NED_USA,1,ClimateNormals_NCDC2005_USA,1 +Site06,6,Site06_weatherdata,-106.28749,35.7953,2072,1,LookupWeatherFolder,CMIP5_BCSD_GDODCPUCLLNL_USA,1,ISRICWISEv12_Global,1,Elevation_NED_USA,1,ClimateNormals_NCDC2005_USA,1 diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_cloud_v10.csv b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_cloud_v10.csv index bab30224..546e292a 100644 --- a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_cloud_v10.csv +++ b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_cloud_v10.csv @@ -1,7 +1,8 @@ 
-"Label","SkyC_1","SkyC_2","SkyC_3","SkyC_4","SkyC_5","SkyC_6","SkyC_7","SkyC_8","SkyC_9","SkyC_10","SkyC_11","SkyC_12","SkyC_Source","wind_ms_1","wind_ms_2","wind_ms_3","wind_ms_4","wind_ms_5","wind_ms_6","wind_ms_7","wind_ms_8","wind_ms_9","wind_ms_10","wind_ms_11","wind_ms_12","Wind_Source","RH_1","RH_2","RH_3","RH_4","RH_5","RH_6","RH_7","RH_8","RH_9","RH_10","RH_11","RH_12","RH_Source","snowd_1","snowd_2","snowd_3","snowd_4","snowd_5","snowd_6","snowd_7","snowd_8","snowd_9","snowd_10","snowd_11","snowd_12","SnowD_Hemisphere","SnowD_Source" -"UseInformationToCreateSoilWatRuns",1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,"1",0 -"Site01",34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,"N",NA -"Site02",34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,"N",NA -"Site03",34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,"N",NA -"Site04",34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,"N",NA -"Site05",34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,"N",NA 
+Label,SkyC_1,SkyC_2,SkyC_3,SkyC_4,SkyC_5,SkyC_6,SkyC_7,SkyC_8,SkyC_9,SkyC_10,SkyC_11,SkyC_12,SkyC_Source,wind_ms_1,wind_ms_2,wind_ms_3,wind_ms_4,wind_ms_5,wind_ms_6,wind_ms_7,wind_ms_8,wind_ms_9,wind_ms_10,wind_ms_11,wind_ms_12,Wind_Source,RH_1,RH_2,RH_3,RH_4,RH_5,RH_6,RH_7,RH_8,RH_9,RH_10,RH_11,RH_12,RH_Source,snowd_1,snowd_2,snowd_3,snowd_4,snowd_5,snowd_6,snowd_7,snowd_8,snowd_9,snowd_10,snowd_11,snowd_12,SnowD_Hemisphere,SnowD_Source +UseInformationToCreateSoilWatRuns,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0 +Site01,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA +Site02,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA +Site03,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA +Site04,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA +Site05,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA +Site06,34,34,34,34,34,34,34,34,34,34,34,34,NA,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,3.3,NA,61,61,61,61,61,61,61,61,61,61,61,61,NA,213.7,241.6,261,308,398.1,464.5,76,76,76,140,161.6,185.1,N,NA diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_soils_v11.csv 
b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_soils_v11.csv index 0bf97e62..5b014c86 100644 --- a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_soils_v11.csv +++ b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/datafiles/SWRuns_InputData_soils_v11.csv @@ -1,7 +1,8 @@ -"Label","Matricd_L1","GravelContent_L1","EvapCoeff_L1","Grass_TranspCoeff_L1","Shrub_TranspCoeff_L1","Tree_TranspCoeff_L1","Forb_TranspCoeff_L1","TranspRegion_L1","Sand_L1","Clay_L1","Imperm_L1","SoilTemp_L1","Matricd_L2","GravelContent_L2","EvapCoeff_L2","Grass_TranspCoeff_L2","Shrub_TranspCoeff_L2","Tree_TranspCoeff_L2","Forb_TranspCoeff_L2","TranspRegion_L2","Sand_L2","Clay_L2","Imperm_L2","SoilTemp_L2","Matricd_L3","GravelContent_L3","EvapCoeff_L3","Grass_TranspCoeff_L3","Shrub_TranspCoeff_L3","Tree_TranspCoeff_L3","Forb_TranspCoeff_L3","TranspRegion_L3","Sand_L3","Clay_L3","Imperm_L3","SoilTemp_L3","Matricd_L4","GravelContent_L4","EvapCoeff_L4","Grass_TranspCoeff_L4","Shrub_TranspCoeff_L4","Tree_TranspCoeff_L4","Forb_TranspCoeff_L4","TranspRegion_L4","Sand_L4","Clay_L4","Imperm_L4","SoilTemp_L4","Matricd_L5","GravelContent_L5","EvapCoeff_L5","Grass_TranspCoeff_L5","Shrub_TranspCoeff_L5","Tree_TranspCoeff_L5","Forb_TranspCoeff_L5","TranspRegion_L5","Sand_L5","Clay_L5","Imperm_L5","SoilTemp_L5","Matricd_L6","GravelContent_L6","EvapCoeff_L6","Grass_TranspCoeff_L6","Shrub_TranspCoeff_L6","Tree_TranspCoeff_L6","Forb_TranspCoeff_L6","TranspRegion_L6","Sand_L6","Clay_L6","Imperm_L6","SoilTemp_L6","Matricd_L7","GravelContent_L7","EvapCoeff_L7","Grass_TranspCoeff_L7","Shrub_TranspCoeff_L7","Tree_TranspCoeff_L7","Forb_TranspCoeff_L7","TranspRegion_L7","Sand_L7","Clay_L7","Imperm_L7","SoilTemp_L7","Matricd_L8","GravelContent_L8","EvapCoeff_L8","Grass_TranspCoeff_L8","Shrub_TranspCoeff_L8","Tree_TranspCoeff_L8","Forb_TranspCoeff_L8","TranspRegion_L8","Sand_L8","Clay_L8","Imperm_L8","Soil
Temp_L8","Matricd_L9","GravelContent_L9","EvapCoeff_L9","Grass_TranspCoeff_L9","Shrub_TranspCoeff_L9","Tree_TranspCoeff_L9","Forb_TranspCoeff_L9","TranspRegion_L9","Sand_L9","Clay_L9","Imperm_L9","SoilTemp_L9","Matricd_L10","GravelContent_L10","EvapCoeff_L10","Grass_TranspCoeff_L10","Shrub_TranspCoeff_L10","Tree_TranspCoeff_L10","Forb_TranspCoeff_L10","TranspRegion_L10","Sand_L10","Clay_L10","Imperm_L10","SoilTemp_L10","Matricd_L11","GravelContent_L11","EvapCoeff_L11","Grass_TranspCoeff_L11","Shrub_TranspCoeff_L11","Tree_TranspCoeff_L11","Forb_TranspCoeff_L11","TranspRegion_L11","Sand_L11","Clay_L11","Imperm_L11","SoilTemp_L11","Matricd_L12","GravelContent_L12","EvapCoeff_L12","Grass_TranspCoeff_L12","Shrub_TranspCoeff_L12","Tree_TranspCoeff_L12","Forb_TranspCoeff_L12","TranspRegion_L12","Sand_L12","Clay_L12","Imperm_L12","SoilTemp_L12","Matricd_L13","GravelContent_L13","EvapCoeff_L13","Grass_TranspCoeff_L13","Shrub_TranspCoeff_L13","Tree_TranspCoeff_L13","Forb_TranspCoeff_L13","TranspRegion_L13","Sand_L13","Clay_L13","Imperm_L13","SoilTemp_L13","Matricd_L14","GravelContent_L14","EvapCoeff_L14","Grass_TranspCoeff_L14","Shrub_TranspCoeff_L14","Tree_TranspCoeff_L14","Forb_TranspCoeff_L14","TranspRegion_L14","Sand_L14","Clay_L14","Imperm_L14","SoilTemp_L14","Matricd_L15","GravelContent_L15","EvapCoeff_L15","Grass_TranspCoeff_L15","Shrub_TranspCoeff_L15","Tree_TranspCoeff_L15","Forb_TranspCoeff_L15","TranspRegion_L15","Sand_L15","Clay_L15","Imperm_L15","SoilTemp_L15","Matricd_L16","GravelContent_L16","EvapCoeff_L16","Grass_TranspCoeff_L16","Shrub_TranspCoeff_L16","Tree_TranspCoeff_L16","Forb_TranspCoeff_L16","TranspRegion_L16","Sand_L16","Clay_L16","Imperm_L16","SoilTemp_L16","Matricd_L17","GravelContent_L17","EvapCoeff_L17","Grass_TranspCoeff_L17","Shrub_TranspCoeff_L17","Tree_TranspCoeff_L17","Forb_TranspCoeff_L17","TranspRegion_L17","Sand_L17","Clay_L17","Imperm_L17","SoilTemp_L17","Matricd_L18","GravelContent_L18","EvapCoeff_L18","Grass_TranspCoeff_L18","Shrub_Trans
pCoeff_L18","Tree_TranspCoeff_L18","Forb_TranspCoeff_L18","TranspRegion_L18","Sand_L18","Clay_L18","Imperm_L18","SoilTemp_L18","Matricd_L19","GravelContent_L19","EvapCoeff_L19","Grass_TranspCoeff_L19","Shrub_TranspCoeff_L19","Tree_TranspCoeff_L19","Forb_TranspCoeff_L19","TranspRegion_L19","Sand_L19","Clay_L19","Imperm_L19","SoilTemp_L19","Matricd_L20","GravelContent_L20","EvapCoeff_L20","Grass_TranspCoeff_L20","Shrub_TranspCoeff_L20","Tree_TranspCoeff_L20","Forb_TranspCoeff_L20","TranspRegion_L20","Sand_L20","Clay_L20","Imperm_L20","SoilTemp_L20" -"UseInformationToCreateSoilWatRuns",1,1,1,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 -"Site01",1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.4,0.1,NA,NA,NA,NA,NA,3,0.168,0.1785,0,NA,1.49,0.1,NA,NA,NA,NA,NA,3,0.08,0.085,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.455,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-"Site02",1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.4,0.1,NA,NA,NA,NA,NA,3,0.24,0.255,0,NA,1.49,0.1,NA,NA,NA,NA,NA,3,0.24,0.255,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.24,0.255,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.435,0.2025,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.435,0.2025,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site03",1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.1,0.1,NA,NA,NA,NA,NA,3,0.08,0.085,0,NA,NA,NA,NA,NA,2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,1,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,1,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.05,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-"Site04",1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.095,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.23,0.0075,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.45,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -"Site05",1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.095,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.23,0.0075,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.45,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Label,Matricd_L1,GravelContent_L1,EvapCoeff_L1,Grass_TranspCoeff_L1,Shrub_TranspCoeff_L1,Tree_TranspCoeff_L1,Forb_TranspCoeff_L1,TranspRegion_L1,Sand_L1,Clay_L1,Imperm_L1,SoilTemp_L1,Matricd_L2,GravelContent_L2,EvapCoeff_L2,Grass_TranspCoeff_L2,Shrub_TranspCoeff_L2,Tree_TranspCoeff_L2,Forb_TranspCoeff_L2,TranspRegion_L2,Sand_L2,Clay_L2,Imperm_L2,SoilTemp_L2,Matricd_L3,GravelContent_L3,EvapCoeff_L3,Grass_TranspCoeff_L3,Shrub_TranspCoeff_L3,Tree_TranspCoeff_L3,Forb_TranspCoeff_L3,TranspRegion_L3,Sand_L3,Clay_L3,Imperm_L3,SoilTemp_L3,Matricd_L4,GravelContent_L4,EvapCoeff_L4,Grass_TranspCoeff_L4,Shrub_TranspCoeff_L4,Tree_TranspCoeff_L4,Forb_TranspCoeff_L4,TranspRegion_L4,Sand_L4,Clay_L4,Imperm_L4,SoilTemp_L4,Matricd_L5,GravelContent_L5,EvapCoeff_L5,Grass_TranspCoeff_L5,Shrub_TranspCoeff_L5,Tree_TranspCoeff_L5,Forb_TranspCoeff_L5,TranspRegion_L5,Sand_L5,Clay_L5,Imperm_L5,SoilTemp_L5,Matricd_L6,GravelContent_L6,EvapCoeff_L6,Grass_TranspCoeff_L6,Shrub_TranspCoeff_L6,Tree_TranspCoeff_L6,Forb_TranspCoeff_L6,TranspRegion_L6,Sand_L6,Clay_L6,Imperm_L6,SoilTemp_L6,Matricd_L7,GravelContent_L7,EvapCoeff_L7,Grass_TranspCoeff_L7,Shrub_TranspCoeff_L7,Tree_TranspCoeff_L7,Forb_TranspCoeff_L7,TranspRegion_L7,Sand_L7,Clay_L7,Imperm_L7,SoilTemp_L7,Matricd_L8,GravelContent_L8,EvapCoeff_L8,Grass_TranspCoeff_L8,Shrub_TranspCoeff_L8,Tree_TranspCoeff_L8,Forb_TranspCoeff_L8,TranspRegion_L8,Sand_L8,Clay_L8,Imperm_L8,SoilTemp_L8,Matricd_L9,GravelContent_L9,EvapCoeff_L9,Grass_TranspCoeff_L9,Shrub_TranspCoeff_L9,Tree_TranspCoeff_L9,Forb_TranspCoeff_L9,TranspRegion_L9,Sand_L9,Clay_L9,Imperm_L9,SoilTemp_L9,Matricd_L10,GravelContent_L10,EvapCoeff_L10,Grass_TranspCoeff_L10,Shrub_TranspCoeff_L10,Tree_TranspCoeff_L10,Forb_TranspCoeff_L10,TranspRegion_L10,Sand_L10,Clay_L10,Imperm_L10,SoilTemp_L10,Matricd_L11,GravelContent_L11,EvapCoeff_L11,Grass_TranspCoeff_L11,Shrub_TranspCoeff_L11,Tree_TranspCoeff_L11,Forb_TranspCoeff_L11,TranspRegion_L11,Sand_L11,Clay_L11,Imperm_L11,SoilTemp_L11,Matricd_L12,GravelCont
ent_L12,EvapCoeff_L12,Grass_TranspCoeff_L12,Shrub_TranspCoeff_L12,Tree_TranspCoeff_L12,Forb_TranspCoeff_L12,TranspRegion_L12,Sand_L12,Clay_L12,Imperm_L12,SoilTemp_L12,Matricd_L13,GravelContent_L13,EvapCoeff_L13,Grass_TranspCoeff_L13,Shrub_TranspCoeff_L13,Tree_TranspCoeff_L13,Forb_TranspCoeff_L13,TranspRegion_L13,Sand_L13,Clay_L13,Imperm_L13,SoilTemp_L13,Matricd_L14,GravelContent_L14,EvapCoeff_L14,Grass_TranspCoeff_L14,Shrub_TranspCoeff_L14,Tree_TranspCoeff_L14,Forb_TranspCoeff_L14,TranspRegion_L14,Sand_L14,Clay_L14,Imperm_L14,SoilTemp_L14,Matricd_L15,GravelContent_L15,EvapCoeff_L15,Grass_TranspCoeff_L15,Shrub_TranspCoeff_L15,Tree_TranspCoeff_L15,Forb_TranspCoeff_L15,TranspRegion_L15,Sand_L15,Clay_L15,Imperm_L15,SoilTemp_L15,Matricd_L16,GravelContent_L16,EvapCoeff_L16,Grass_TranspCoeff_L16,Shrub_TranspCoeff_L16,Tree_TranspCoeff_L16,Forb_TranspCoeff_L16,TranspRegion_L16,Sand_L16,Clay_L16,Imperm_L16,SoilTemp_L16,Matricd_L17,GravelContent_L17,EvapCoeff_L17,Grass_TranspCoeff_L17,Shrub_TranspCoeff_L17,Tree_TranspCoeff_L17,Forb_TranspCoeff_L17,TranspRegion_L17,Sand_L17,Clay_L17,Imperm_L17,SoilTemp_L17,Matricd_L18,GravelContent_L18,EvapCoeff_L18,Grass_TranspCoeff_L18,Shrub_TranspCoeff_L18,Tree_TranspCoeff_L18,Forb_TranspCoeff_L18,TranspRegion_L18,Sand_L18,Clay_L18,Imperm_L18,SoilTemp_L18,Matricd_L19,GravelContent_L19,EvapCoeff_L19,Grass_TranspCoeff_L19,Shrub_TranspCoeff_L19,Tree_TranspCoeff_L19,Forb_TranspCoeff_L19,TranspRegion_L19,Sand_L19,Clay_L19,Imperm_L19,SoilTemp_L19,Matricd_L20,GravelContent_L20,EvapCoeff_L20,Grass_TranspCoeff_L20,Shrub_TranspCoeff_L20,Tree_TranspCoeff_L20,Forb_TranspCoeff_L20,TranspRegion_L20,Sand_L20,Clay_L20,Imperm_L20,SoilTemp_L20 
+UseInformationToCreateSoilWatRuns,1,1,1,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +Site01,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.4,0.1,NA,NA,NA,NA,NA,3,0.168,0.1785,0,NA,1.49,0.1,NA,NA,NA,NA,NA,3,0.08,0.085,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.455,0.1,NA,NA,NA,NA,NA,NA,0.08,0.085,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Site02,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.4,0.1,NA,NA,NA,NA,NA,3,0.24,0.255,0,NA,1.49,0.1,NA,NA,NA,NA,NA,3,0.24,0.255,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.24,0.255,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.435,0.2025,0,NA,1.5,0.1,NA,NA,NA,NA,NA,NA,0.435,0.2025,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site03,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.1,0.1,NA,NA,NA,NA,NA,3,0.08,0.085,0,NA,NA,NA,NA,NA,2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,1,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,1,NA,NA,NA,NA,NA,NA,NA,NA,2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.05,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Site04,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.095,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.23,0.0075,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.45,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Site05,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,1.47,0.09,NA,NA,NA,NA,NA,1,0.7,0.15,0,NA,1.46,0.11,NA,NA,NA,NA,NA,1,0.66,0.18,0,NA,1.52,0.11,NA,NA,NA,NA,NA,2,0.63,0.19,0,NA,1.55,0.14,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.53,0.13,NA,NA,NA,NA,NA,2,0.6,0.22,0,NA,1.095,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,3,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.483,0.01575,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.23,0.0075,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,1.05,0.1,NA,NA,NA,NA,NA,NA,0.03415,0.006,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.45,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Site06,1.46,0.11,1,NA,NA,NA,NA,1,0.67,0.16,0,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/dbWeatherData.sqlite3 b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/dbWeatherData.sqlite3 index 343474a7..6e1d8c0f 100644 Binary files a/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/dbWeatherData.sqlite3 and b/tests/Test_projects/Test4_AllOverallAggregations_snow/1_Data_SWInput/dbWeatherData.sqlite3 differ diff --git a/tests/Test_projects/Test4_AllOverallAggregations_snow/2_SWSF_p1of5_Settings_v51.R b/tests/Test_projects/Test4_AllOverallAggregations_snow/2_SWSF_p1of5_Settings_v51.R index 2d0800e1..ea483f50 100644 --- a/tests/Test_projects/Test4_AllOverallAggregations_snow/2_SWSF_p1of5_Settings_v51.R +++ b/tests/Test_projects/Test4_AllOverallAggregations_snow/2_SWSF_p1of5_Settings_v51.R @@ -186,16 +186,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate 
condition names must be equal to what is in the respective directories - #CMIP3 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 0, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 0, @@ -216,6 +208,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... 
if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -249,7 +255,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -265,21 +271,21 @@ climate.conditions <- c(climate.ambient) #Will be applied to each climate.conditions downscaling.method <- c("hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 
2010), or "hybrid-delta-3mod" -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." 
+ # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but not implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #------Names of files that contain input data or treatment codes @@ -385,6 +391,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 1, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 1, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 1, "dailyWetDegreeDays", 1, "dailyThermalDrynessStartEnd", 1, "dailyThermalSWPConditionCount", 1, diff --git a/tests/Test_projects/Test5_AllOverallAggregations_mpi/2_SWSF_p1of5_Settings_v51.R b/tests/Test_projects/Test5_AllOverallAggregations_mpi/2_SWSF_p1of5_Settings_v51.R index 793f197f..42ed1558 100644 --- a/tests/Test_projects/Test5_AllOverallAggregations_mpi/2_SWSF_p1of5_Settings_v51.R +++ b/tests/Test_projects/Test5_AllOverallAggregations_mpi/2_SWSF_p1of5_Settings_v51.R @@ -70,7 +70,7 @@ if(interactive()) { setwd(dir.prj) } dir.prj <- dir.big <- getwd() -dir.code <- normalizePath(file.path("..", "..", "..")) # "github/SoilWat_R_Wrapper" +dir.code <- normalizePath(file.path("..", "..", "..", "R")) # "github/SoilWat_R_Wrapper/R" #parent folder containing external data #drs dir.external <- "/Volumes/YOURBIGDATA/BigData/GIS/Data" @@ -189,16 +189,8 @@ do.ExtractExternalDatasets <- c( "GriddedDailyWeatherFromNRCan_10km_Canada", 0, # can only be used together with database "GriddedDailyWeatherFromNCEPCFSR_Global", 0, # can only be used together with database - #Mean monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the 
respective directories - #CMIP3 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_Global", 0, #50-km resolution for mean of 2070-2099 - "ExtractClimateChangeScenarios_CMIP3_ClimateWizardEnsembles_USA", 0, #12-km resolution for mean change between 2070-2099 and 1971-2000 - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP3_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - #CMIP5 - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_USA", 0, #1/8-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_GDODCPUCLLNL_Global", 0, #1/2-degree resolution - "ExtractClimateChangeScenarios_CMIP5_BCSD_NEX_USA", 0, #30-arcsec resolution; requires live internet access + #Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, climate condition names must be of the form SCENARIO.GCM with SCENARIO being used for ensembles; if using climatewizard, climate condition names must be equal to what is in the respective directories + "ExtractClimateChangeScenarios", 0, #Mean monthly wind, relative humidity, and 100% - sunshine "ExtractSkyDataFromNOAAClimateAtlas_USA", 0, @@ -219,6 +211,20 @@ chunk_size.options <- list( DailyWeatherFromNCEPCFSR_Global = 100 # this is also OS-limited by the number of concurrently open files (on 'unix' platforms, check with 'ulimit -a') ) +opt_climsc_extr <- c( + # for each climate data set from which to extract, add an element like 'dataset1' + # priority of extraction: dataset1, dataset2, ... 
if multiple sources provide data for a location + # dataset = 'project_source' with + # - project = one string out of c("CMIP3", "CMIP5", "GeoMIP") + # - source = one string out of: + # - "ClimateWizardEnsembles_Global": mean monthly values at 50-km resolution for 2070-2099 + # - "ClimateWizardEnsembles_USA": mean monthly change at 12-km resolution between 2070-2099 and 1971-2000 + # - "BCSD_GDODCPUCLLNL_USA": monthly time series at 1/8-degree resolution + # - "BCSD_GDODCPUCLLNL_Global": monthly time series at 1/2-degree resolution + # - "BCSD_NEX_USA": monthly time series at 30-arcsec resolution; requires live internet access + dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" +) + do.PriorCalculations <- c( "ExtendSoilDatafileToRequestedSoilLayers", 0, "EstimateConstantSoilTemperatureAtUpperAndLowerBoundaryAsMeanAnnualAirTemperature", 1, @@ -252,7 +258,7 @@ rownames(future_yrs) <- make.names(paste0("d", future_yrs[, "delta"], "yrs"), un #------Meta-information of input data datafile.windspeedAtHeightAboveGround <- 2 #SoilWat requires 2 m, but some datasets are at 10 m, e.g., NCEP/CRSF: this value checks windspeed height and if necessary converts to u2 adjust.soilDepth <- FALSE # [FALSE] fill soil layer structure from shallower layer(s) or [TRUE] adjust soil depth if there is no soil texture information for the lowest layers -requested_soil_layers <- seq(10, 100, by = 10) +requested_soil_layers <- c(5, 10, 20, 30, 40, 50, 60, 70, 80, 100, 150) increment_soiltemperature_deltaX_cm <- 5 # If SOILWAT soil temperature is simulated and the solution instable, then the soil profile layer width is increased by this value until a stable solution can be found or total failure is determined #Climate conditions @@ -268,21 +274,21 @@ climate.conditions <- c(climate.ambient) #Will be applied to each climate.conditions downscaling.method <- c("hybrid-delta-3mod") #one or multiple of "raw", "delta" (Hay et al. 2002), "hybrid-delta" (Hamlet et al. 
2010), or "hybrid-delta-3mod" -downscaling.options <- list( - daily_ppt_limit = 1.5, # - monthly_limit = 1.5, # - ppt_type = "detailed", # either "detailed" or "simple" - correct_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines - # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes - # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events - # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values - extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" - # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boe ?? et al., 2007; Theme??l et al., 2012)." - # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm - # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm - # - "normal", but no implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. 
In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." - sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean - PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event +opt_DS <- list( + daily_ppt_limit = 1.5, # + monthly_limit = 1.5, # + ppt_type = "detailed", # either "detailed" or "simple" + fix_spline = "attempt", # one of "fail", "none" or "attempt"; only used if extrapol_type is using splines + # - "fail": downscaling fails if spline extrapolations fall outside estimated monthly extremes + # - "none": no correction for extrapolated monthly extreme values, but this will likely fail during correction of extreme daily PPT events + # - "attempt": repeated attempts with jittering data to fit spline extrapolations within estimated monthly extreme values + extrapol_type = "linear_Thermessl2012CC.QMv1b", # one of "linear_Boe", "linear_Thermessl2012CC.QMv1b", "linear_none", "tricub_fmm", "tricub_monoH.FC", "tricub_natural", "normal_anomalies" + # - "linear": Gudmundsson et al. 2012: "If new model values (e.g. from climate projections) are larger than the training values used to estimate the empirical CDF, the correction found for the highest quantile of the training period is used (Boé et al., 2007; Themeßl et al., 2012)." 
+ # - "tricub": I got really large output values, e.g., obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 88 cm, hd.fut = 89 cm + # - "linear" (i.e., using Boe et al.'s correction) resulted for the same site to: obs.hist = 54 cm, scen.fut = 64 cm, sbc.fut = 75 cm, hd.fut = 75 cm + # - "normal", but not implemented in qmap: Tohver et al. 2014, Appendix A, p. 6: "... values that are outside the observed quantile map (e.g. in the early parts of the 20th century) are interpolated using standard anomalies (i.e. number of standard deviations from the mean) calculated for the observed data and GCM data. Although this approach ostensibly assumes a normal distribution, it was found during testing to be much more stable than attempts to use more sophisticated approaches. In particular, the use of Extreme Value Type I or Generalized Extreme Value distributions for extending the tail of the probability distributions were both found to be highly unstable in practice and introduced unacceptable daily extremes in isolated grid cells. These errors occur because of irregularities in the shapes of the CDFs for observed and GCM data, which relates in part to the relatively small sample size used to construct the monthly CDFs (i.e. n = 30)." 
+ sigmaN = 6, # test whether data distributions are within sigmaN * sd of mean + PPTratioCutoff = 10 # above and below that value use additive instead of multiplicative adjustments for precipitation; 3 was too small -> resulting in too many medium-sized ppt-event ) #Climate ensembles created across scenarios @@ -374,6 +380,7 @@ output_aggregates <- c( #---Aggregation: Ecological dryness "dailyNRCS_SoilMoistureTemperatureRegimes", 1, #Requires at least soil layers at 10, 20, 30, 50, 60, 90 cm "dailyNRCS_Chambers2014_ResilienceResistance", 1, #Requires "dailyNRCS_SoilMoistureTemperatureRegimes" + "dailyNRCS_Maestas2016_ResilienceResistance", 1, "dailyWetDegreeDays", 1, "dailyThermalDrynessStartEnd", 1, "dailyThermalSWPConditionCount", 1, diff --git a/tests/testthat/test_GISSM_germination_wait_times.R b/tests/testthat/test_GISSM_germination_wait_times.R new file mode 100644 index 00000000..9f74a206 --- /dev/null +++ b/tests/testthat/test_GISSM_germination_wait_times.R @@ -0,0 +1,109 @@ +context("GISSM: germination_wait_times") + +# Inputs +test_data <- list( + real_data = list( + dfc = c(NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 132L, 131L, + 130L, 129L, 128L, 127L, 126L, 125L, 124L, 123L, 122L, 121L, 120L, + 119L, 118L, 117L, 116L, 115L, 114L, NA, 113L, 112L, 111L, 110L, + 109L, 108L, 107L, 106L, 105L, 104L, 103L, 
102L, 101L, 100L, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 99L, 98L, 97L, 96L, 95L, + 94L, 93L, 92L, 91L, 90L, 89L, NA, NA, NA, NA, NA, NA, NA, 88L, + 87L, 86L, 85L, 84L, 83L, 82L, 81L, 80L, 79L, 78L, 77L, 76L, 75L, + 74L, 73L, 72L, 71L, 70L, 69L, 68L, 67L, 66L, 65L, 64L, 63L, 62L, + 61L, 60L, 59L, 58L, 57L, 56L, 55L, 54L, 53L, 52L, 51L, 50L, 49L, + 48L, 47L, 46L, 45L, 44L, 43L, 42L, 41L, 40L, 39L, 38L, 37L, 36L, + 35L, 34L, 33L, 32L, 31L, 30L, 29L, 28L, NA, 27L, 26L, 25L, 24L, + 23L, 22L, 21L, 20L, 19L, 18L, 17L, 16L, 15L, 14L, 13L, 12L, 11L, + NA, 10L, 9L, NA, NA, NA, NA, 8L, 7L, 6L, NA, NA, NA, NA, NA, + NA, NA, NA, NA, 5L, NA, NA, NA, 4L, NA, NA, 3L, 2L, 1L, NA, NA, + NA, NA, NA, NA), + + ttg = c(NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 6L, 6L, 5L, + 5L, 6L, 5L, 4L, 7L, 4L, 5L, 7L, 4L, 9L, 3L, 7L, 6L, 10L, 2L, + 5L, NA, 4L, 3L, 4L, 3L, 2L, 8L, 4L, 4L, 5L, 10L, 4L, 4L, 5L, + 3L, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, + NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 7L, 4L, 9L, 5L, + 3L, 5L, 5L, 4L, 8L, 7L, 6L, NA, NA, NA, NA, NA, NA, NA, 5L, 6L, + 5L, 5L, 6L, 7L, 5L, 5L, 4L, 9L, 5L, 4L, 3L, 5L, 4L, 6L, 5L, 3L, + 4L, 6L, 5L, 9L, 6L, 4L, 4L, 4L, 6L, 8L, 4L, 3L, 3L, 7L, 5L, 5L, + 5L, 4L, 5L, 3L, 3L, 4L, 6L, 5L, 6L, 2L, 
7L, 7L, 5L, 6L, 4L, 5L, + 4L, 12L, 6L, 6L, 5L, 3L, 2L, 6L, 4L, 6L, 4L, NA, 11L, 7L, 7L, + 7L, 5L, 7L, 6L, 5L, 4L, 4L, 5L, 8L, 8L, 6L, 4L, 4L, 9L, NA, 4L, + 2L, NA, NA, NA, NA, 6L, 5L, 4L, NA, NA, NA, NA, NA, NA, NA, NA, + NA, 5L, NA, NA, NA, 4L, NA, NA, 3L, 2L, 1L, NA, NA, NA, NA, NA, + NA), + + ref = c(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 1L, 0L, 1L, + 1L, 1L, 0L, 1L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 27L, 0L, + 27L, 27L, 27L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 7L, 7L, 7L, 0L, + 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, + 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, + 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, + 0L, 0L, 1L, 0L, 0L, 0L, 0L, 0L, 1L, 1L, 1L, 1L, 0L, 0L, 0L, 0L, + 0L, 0L, 0L, 0L, 0L, 0L, 0L, 1L, 5L, 1L, 1L, 1L, 19L, 4L, 4L, + 14L, 14L, 14L, 5L, 2L, 0L, 0L, 0L) + ), + + test1 = list(ttg = NA, dfc = NA, ref = integer(0)), + test2 = list(ttg = 1, dfc = 1, ref = 0L), + test3 = list(ttg = rep(1, 10), dfc = 10:1, ref = rep(0L, 10)), + test4 = list(ttg = temp <- c(2, NA, 1), dfc = temp, ref = c(1L, 0L)), + test5 = list(ttg = temp <- c(2, rep(NA, 10), 1), dfc = temp, ref = c(10L, 0L)), + test6 = list(ttg = temp <- c(3, NA, NA, 2, NA, 1), dfc = temp, ref = c(3L, 1L, 0L)), + test7 = list(ttg = c(3, 3, 3, NA, NA, 2, NA, 1), + dfc = c(5, 4, 3, NA, NA, 2, NA, 1), ref = c(0L, 2L, 3L, 1L, 0L)), + test8 = list(ttg = c(NA, 8, 1, 2, 1, NA, NA, NA, 3, 3, 3, NA, NA, 2, NA, 1), + dfc = c(NA, 9, 8, 7, 6, NA, NA, NA, 5, 4, 3, NA, NA, 2, NA, 1), + ref = c(5L, 0L, 0L, 0L, 0L, 2L, 3L, 1L, 0L)) +) + + + +test_that("germination_wait_times", { + for (k in seq_along(test_data)) + with(test_data[[k]], expect_equal(as.integer(germination_wait_times(ttg, dfc)), ref, + info = paste("Test dataset =", shQuote(names(test_data)[k])))) + + if (FALSE) { + for (k in seq_along(test_data)[-1]) { + print(paste("Test =", k, "with dataset =", shQuote(names(test_data)[k]))) + print(paste("ttg =", paste(test_data[[k]][["ttg"]], 
collapse = ", "))) + print(paste("dfc =", paste(test_data[[k]][["dfc"]], collapse = ", "))) + print(paste("ref =", paste(test_data[[k]][["ref"]], collapse = ", "))) + out <- as.integer(germination_wait_times(test_data[[k]][["ttg"]], test_data[[k]][["dfc"]])) + print(paste("out =", paste(out, collapse = ", "))) + print("") + } +} + + #--- Errors + # time_to_germinate is not NA, but duration_fave_cond is NA + expect_error(germination_wait_times(1, NA)) + # germination takes longer than available favorable condition + expect_error(germination_wait_times(2, 1)) + expect_error(germination_wait_times(c(3, NA, 1), c(2, NA, 1))) + # arguments not of identical length + expect_error(germination_wait_times(rep(1, 10), 8:1)) +}) diff --git a/tests/testthat/test_GISSM_get_KilledBySoilLayers.R b/tests/testthat/test_GISSM_get_KilledBySoilLayers.R new file mode 100644 index 00000000..319207ce --- /dev/null +++ b/tests/testthat/test_GISSM_get_KilledBySoilLayers.R @@ -0,0 +1,42 @@ +context("GISSM: get_KilledBySoilLayers") + +# Inputs +Nd <- 365 +Nl <- 10 +Nl2 <- round(Nl / 2) +Nl3 <- round(Nl / 3) +cond1 <- matrix(FALSE, nrow = Nd, ncol = Nl) +cond2 <- matrix(TRUE, nrow = Nd, ncol = Nl) +cond3 <- cbind(matrix(TRUE, nrow = Nd, ncol = Nl2), + matrix(FALSE, nrow = Nd, ncol = Nl2)) +cond4 <- cbind(matrix(TRUE, nrow = Nd, ncol = Nl3), + matrix(FALSE, nrow = Nd, ncol = Nl3), + matrix(TRUE, nrow = Nd, ncol = Nl3)) + +test_that("get_KilledBySoilLayers", { + expect_equal(get_KilledBySoilLayers(NA, cond1), NA) + expect_equal(get_KilledBySoilLayers(Nl, cond1), FALSE) + expect_equal(get_KilledBySoilLayers(Nl, cond2), TRUE) + expect_equal(get_KilledBySoilLayers(Nl2, cond3), TRUE) + expect_equal(get_KilledBySoilLayers(2 * Nl2, cond3), FALSE) + expect_equal(get_KilledBySoilLayers(Nl3, cond4), TRUE) + expect_equal(get_KilledBySoilLayers(2 * Nl3, cond4), FALSE) + expect_equal(get_KilledBySoilLayers(3 * Nl3, cond4), FALSE) + + expect_equal(get_KilledBySoilLayers(rep(NA, Nd), cond1), rep(NA, Nd)) + 
expect_equal(get_KilledBySoilLayers(rep(10, Nd), cond1), rep(FALSE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(10, Nd), cond2), rep(TRUE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(Nl2, Nd), cond3), rep(TRUE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(2 * Nl2, Nd), cond3), rep(FALSE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(Nl3, Nd), cond4), rep(TRUE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(2 * Nl3, Nd), cond4), rep(FALSE, Nd)) + expect_equal(get_KilledBySoilLayers(rep(3 * Nl3, Nd), cond4), rep(FALSE, Nd)) + + #--- Errors + # relevantLayers: too long + expect_error(get_KilledBySoilLayers(rep(NA, Nd + 1), cond1)) + # relevantLayers: too large values + expect_error(get_KilledBySoilLayers(Nl + 1, cond1)) + # relevantLayers: negative values + expect_error(get_KilledBySoilLayers(-1, cond1)) +}) diff --git a/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R b/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R new file mode 100644 index 00000000..565d6224 --- /dev/null +++ b/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R @@ -0,0 +1,43 @@ +context("GISSM: setFALSE_SeedlingSurvival_1stSeason") + +# Inputs +test_data <- list( + test1 = list( + ss1s = temp <- rep(TRUE, 10), ry_year_day = rep(1, 10), + ry_useyrs = 1, y = 1, doy = itemp <- 1, + ref = {ref <- temp; ref[0 + itemp] <- FALSE; ref}), + + test2 = list( + ss1s = temp <- rep(TRUE, 10), ry_year_day = rep(1, 10), + ry_useyrs = 1, y = 1, doy = itemp <- 10, + ref = {ref <- temp; ref[0 + itemp] <- FALSE; ref}), + + test3 = list( + ss1s = temp <- rep(TRUE, 30), ry_year_day = rep(1:3, each = 10), + ry_useyrs = 1:3, y = 3, doy = itemp <- 10, + ref = {ref <- temp; ref[20 + itemp] <- FALSE; ref}), + + test4 = list( + ss1s = temp <- rep(FALSE, 30), ry_year_day = rep(1:3, each = 10), + ry_useyrs = 1:3, y = 3, doy = itemp <- 10, + ref = temp) +) + + +test_that("setFALSE_SeedlingSurvival_1stSeason", { + for (k in seq_along(test_data)) + with(test_data[[k]], + 
expect_equal( + setFALSE_SeedlingSurvival_1stSeason(ss1s, ry_year_day, ry_useyrs, y, doy), + ref, + info = paste("Test dataset =", shQuote(names(test_data)[k])))) + + #--- Errors + if (requireNamespace("Rcpp")) { + expect_error(setFALSE_SeedlingSurvival_1stSeason(rep(TRUE, 7), rep(1, 10), 1, 1, 1)) + expect_error(setFALSE_SeedlingSurvival_1stSeason(rep(TRUE, 10), rep(1, 7), 1, 1, 1)) + expect_error(setFALSE_SeedlingSurvival_1stSeason(rep(TRUE, 10), rep(1, 10), 7, 1, 1)) + expect_error(setFALSE_SeedlingSurvival_1stSeason(rep(TRUE, 10), rep(1, 10), 1, 7, 1)) + expect_error(setFALSE_SeedlingSurvival_1stSeason(rep(TRUE, 10), rep(1, 10), 1, 1, 70)) + } +})