From b817573009a5911bb3c0c79afb2806fefe84fdee Mon Sep 17 00:00:00 2001 From: meetagrawal09 Date: Thu, 7 Sep 2023 22:00:54 +0530 Subject: [PATCH] parent b442c861892d7eba253423917fe5480e63d7dad0 author meetagrawal09 1694104254 +0530 committer meetagrawal09 1700486393 +0530 added manual trigger, updated date --- .github/workflows/integration-test.yml | 40 ++++++-- book_source/_bookdown.yml | 2 +- docker/depends/pecan.depends.R | 4 +- modules/data.atmosphere/DESCRIPTION | 6 +- modules/data.atmosphere/NAMESPACE | 2 +- .../data.atmosphere/R/GEFS_helper_functions.R | 29 +++--- .../data.atmosphere/R/download.NARR_site.R | 4 +- ...met_temporal_downscale.Gaussian_ensemble.R | 6 +- .../test.download.AmerifluxLBL.R | 39 ++++---- .../integrationTests/test.download.CRUNCEP.R | 7 +- .../integrationTests/test.download.ERA5.R | 85 +++++++++-------- .../tests/Rcheck_reference.log | 4 +- modules/data.land/DESCRIPTION | 1 - modules/data.land/R/gis.functions.R | 8 +- modules/data.mining/DESCRIPTION | 4 +- .../data.mining/tests/Rcheck_reference.log | 49 ++++++++++ modules/data.remote/DESCRIPTION | 4 +- modules/data.remote/R/NLCD.R | 29 +++--- modules/data.remote/R/download.thredds.R | 2 +- .../data.remote/tests/Rcheck_reference.log | 95 +++---------------- scripts/check_with_errors.R | 24 ++--- 21 files changed, 232 insertions(+), 212 deletions(-) create mode 100644 modules/data.mining/tests/Rcheck_reference.log diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index d945631f1cb..08fa86dbda3 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -1,21 +1,49 @@ name : Integration Tests -on : +on : + # allow manual triggering + workflow_dispatch: + + # for debugging + push: + pull_request: + schedule: - - cron: '30 4 * * 1' + # run Thursday 4:30 AM UTC + - cron: '30 4 * * 4' jobs: test: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 env: GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} container: - image: pecan/depends:R4.1 + image: pecan/base:develop steps: - name: Checkout source code uses: actions/checkout@v3 + - name: CDS API file setup + run: | + echo "url: https://cds.climate.copernicus.eu/api/v2" >> .cdsapirc + echo "key: ${{ secrets.ERA5_API_KEY }}" >> .cdsapirc + + - name: Print Output to Console + run: | + cat .cdsapirc + + - name: Set up Python + uses: actions/setup-python@v2 + + - name: Install dependencies + run: | + sudo apt-get update + sudo apt-get install python3-pip -y + sudo -H python3 -m pip install --upgrade pip + sudo -H pip install --user cdsapi + - name: Run tests run: | - for FILE in modules/data.atmosphere/inst/integrationTests/*; Rscript $FILE; - \ No newline at end of file + for FILE in modules/data.atmosphere/inst/integrationTests/*; do + Rscript $FILE; + done diff --git a/book_source/_bookdown.yml b/book_source/_bookdown.yml index 45407cef0bc..fb22b3b1e6e 100644 --- a/book_source/_bookdown.yml +++ b/book_source/_bookdown.yml @@ -1,2 +1,2 @@ rmd_subdir: true -edit: https://github.com/tonygardella/pecan/edit/release/vtonydoc/book_source/%s +edit: https://github.com/PecanProject/pecan/edit/develop/book_source/%s diff --git a/docker/depends/pecan.depends.R b/docker/depends/pecan.depends.R index c208e5e5525..e49b8fb72fb 100644 --- a/docker/depends/pecan.depends.R +++ b/docker/depends/pecan.depends.R @@ -9,6 +9,7 @@ Sys.setenv(RLIB = rlib) # install remotes first in case packages are references in dependencies remotes::install_github(c( +'adokter/suntools', 'araiho/linkages_package', 
'chuhousen/amerifluxr', 'ebimodeling/biocro@0.951', @@ -76,7 +77,6 @@ wanted <- c( 'magic', 'magrittr', 'maps', -'maptools', 'markdown', 'MASS', 'Matrix', @@ -113,7 +113,6 @@ wanted <- c( 'reshape', 'reshape2', 'reticulate', -'rgdal', 'rjags', 'rjson', 'rlang', @@ -130,6 +129,7 @@ wanted <- c( 'stats', 'stringi', 'stringr', +'suntools', 'swfscMisc', 'terra', 'testthat', diff --git a/modules/data.atmosphere/DESCRIPTION b/modules/data.atmosphere/DESCRIPTION index a29287049c5..097a134349e 100644 --- a/modules/data.atmosphere/DESCRIPTION +++ b/modules/data.atmosphere/DESCRIPTION @@ -36,7 +36,6 @@ Imports: jsonlite, lubridate (>= 1.6.0), magrittr, - maptools, MASS, mgcv, ncdf4 (>= 1.15), @@ -49,10 +48,12 @@ Imports: raster, REddyProc, reshape2, - rgdal, rlang (>= 0.2.0), + sf, sp, stringr (>= 1.1.0), + suntools, + terra, testthat (>= 2.0.0), tibble, tidyr, @@ -69,6 +70,7 @@ Suggests: progress, reticulate Remotes: + github::adokter/suntools, github::chuhousen/amerifluxr, github::ropensci/geonames, github::ropensci/nneo diff --git a/modules/data.atmosphere/NAMESPACE b/modules/data.atmosphere/NAMESPACE index 7183782887f..9dd14faa440 100644 --- a/modules/data.atmosphere/NAMESPACE +++ b/modules/data.atmosphere/NAMESPACE @@ -113,5 +113,5 @@ export(write_noaa_gefs_netcdf) import(dplyr) import(tidyselect) importFrom(magrittr,"%>%") -importFrom(rgdal,checkCRSArgs) importFrom(rlang,.data) +importFrom(sf,st_crs) diff --git a/modules/data.atmosphere/R/GEFS_helper_functions.R b/modules/data.atmosphere/R/GEFS_helper_functions.R index 99b0a4db6e6..c63768daf11 100644 --- a/modules/data.atmosphere/R/GEFS_helper_functions.R +++ b/modules/data.atmosphere/R/GEFS_helper_functions.R @@ -243,25 +243,30 @@ process_gridded_noaa_download <- function(lat_list, for(hr in 1:length(curr_hours)){ file_name <- paste0(base_filename2, curr_hours[hr]) + grib_file_name <- paste0(working_directory,"/", file_name,".grib") - if(file.exists(paste0(working_directory,"/", file_name,".grib"))){ - grib <- rgdal::readGDAL(paste0(working_directory,"/", file_name,".grib"), silent = TRUE) - lat_lon <- sp::coordinates(grib) + if(file.exists(grib_file_name)){ + grib_data <- terra::rast(grib_file_name) + + ## Convert to data frame + grib_data_df <- terra::as.data.frame(grib_data, xy=TRUE) + lat_lon <- grib_data_df[, c("x", "y")] + for(s in 1:length(site_id)){ index <- which(lat_lon[,2] == lats[s] & lat_lon[,1] == lons[s]) - pressfc[s, hr] <- grib$band1[index] - tmp2m[s, hr] <- grib$band2[index] - rh2m[s, hr] <- grib$band3[index] - ugrd10m[s, hr] <- grib$band4[index] - vgrd10m[s, hr] <- grib$band5[index] + pressfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Pressure [Pa]`[index] + tmp2m[s, hr] <- grib_data_df$`2[m] HTGL=Specified height level above ground; Temperature [C]`[index] + rh2m[s, hr] <- grib_data_df$`2[m] HTGL=Specified height level above ground; Relative humidity [%]`[index] + ugrd10m[s, hr] <- grib_data_df$`10[m] HTGL=Specified height level above ground; u-component of wind [m/s]`[index] + vgrd10m[s, hr] <- grib_data_df$`10[m] HTGL=Specified height level above ground; v-component of wind [m/s]`[index] if(curr_hours[hr] != "000"){ - apcpsfc[s, hr] <- grib$band6[index] - tcdcclm[s, hr] <- grib$band7[index] - dswrfsfc[s, hr] <- grib$band8[index] - dlwrfsfc[s, hr] <- grib$band9[index] + apcpsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; 03 hr Total precipitation [kg/(m^2)]`[index] + tcdcclm[s, hr] <- grib_data_df$`RESERVED(10) (Reserved); Total cloud cover [%]`[index] + dswrfsfc[s, hr] <- grib_data_df$`SFC=Ground or 
water surface; Downward Short-Wave Rad. Flux [W/(m^2)]`[index] + dlwrfsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Downward Long-Wave Rad. Flux [W/(m^2)]`[index] } } } diff --git a/modules/data.atmosphere/R/download.NARR_site.R b/modules/data.atmosphere/R/download.NARR_site.R index 7ecfbca6bae..703546874e2 100644 --- a/modules/data.atmosphere/R/download.NARR_site.R +++ b/modules/data.atmosphere/R/download.NARR_site.R @@ -465,9 +465,9 @@ latlon2narr <- function(nc, lat.in, lon.in) { #' @inheritParams get_NARR_thredds #' @return `sp::SpatialPoints` object containing transformed x and y #' coordinates, in km, which should match NARR coordinates -#' @importFrom rgdal checkCRSArgs +#' @importFrom sf st_crs # ^not used directly here, but needed by sp::CRS. - # sp lists rgdal in Suggests rather than Imports, + # sp lists sf in Suggests rather than Imports, # so importing it here to ensure it's available at run time #' @author Alexey Shiklomanov #' @export diff --git a/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R b/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R index 1082ffb60e3..a241d1fbe5f 100644 --- a/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R +++ b/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R @@ -216,11 +216,11 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold day <- as.POSIXct(sprintf("%s 12:00:00", date), tz = tz) sequence <- seq(from = day, length.out = span, by = "days") - sunrise <- maptools::sunriset(lon.lat, sequence, direction = "sunrise", + sunrise <- suntools::sunriset(lon.lat, sequence, direction = "sunrise", POSIXct.out = TRUE) - sunset <- maptools::sunriset(lon.lat, sequence, direction = "sunset", + sunset <- suntools::sunriset(lon.lat, sequence, direction = "sunset", POSIXct.out = TRUE) - solar_noon <- maptools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE) + solar_noon <- suntools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE) data.frame(date = as.Date(sunrise$time), sunrise = as.numeric(format(sunrise$time, "%H%M")), solarnoon = as.numeric(format(solar_noon$time, "%H%M")), diff --git a/modules/data.atmosphere/inst/integrationTests/test.download.AmerifluxLBL.R b/modules/data.atmosphere/inst/integrationTests/test.download.AmerifluxLBL.R index e46d1d4830f..372f5bfb11d 100644 --- a/modules/data.atmosphere/inst/integrationTests/test.download.AmerifluxLBL.R +++ b/modules/data.atmosphere/inst/integrationTests/test.download.AmerifluxLBL.R @@ -1,4 +1,5 @@ library(testthat) +library(PEcAn.DB) test_download_AmerifluxLBL <- function(start_date, end_date, sitename, lat.in, lon.in) { # putting logger to debug mode @@ -7,13 +8,13 @@ test_download_AmerifluxLBL <- function(start_date, end_date, sitename, lat.in, l PEcAn.logger::logger.setLevel("DEBUG") # mocking functions - mockery::stub(PEcAn.DB::convert_input, 'dbfile.input.check', data.frame()) - mockery::stub(PEcAn.DB::convert_input, 'db.query', data.frame(id = 1)) + mockery::stub(convert_input, 'dbfile.input.check', data.frame()) + mockery::stub(convert_input, 'db.query', data.frame(id = 1)) withr::with_dir(tempdir(), { tmpdir <- getwd() # calling download function - res <- PEcAn.DB::convert_input( + res <- convert_input( input.id = NA, outfolder = tmpdir, formatname = NULL, @@ -31,23 +32,23 @@ test_download_AmerifluxLBL <- function(start_date, end_date, sitename, lat.in, l lon.in = lon.in, sitename = sitename ) - }) - - # checking if the file is downloaded - test_that("Downloaded files are 
present at the desired location", { - expect_true(file.exists(paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv"))) - }) - test_that("Downloaded data files have the right format", { - firstline <- system(paste0("head -4 ", paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv")), intern = TRUE) - lastline <- system(paste0("tail -1 ", paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv")), intern = TRUE) - - # checking if first line of CSV has the sitename - expect_true(grepl(sitename, firstline[1])) - - # fourth and last row checked to contain non-alphabetical data since these are used to verify start and end dates - expect_false(grepl("[A-Za-z]", firstline[4])) - expect_false(grepl("[A-Za-z]", lastline[1])) + # checking if the file is downloaded + test_that("Downloaded files are present at the desired location", { + expect_true(file.exists(paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv"))) + }) + + test_that("Downloaded data files have the right format", { + firstline <- system(paste0("head -4 ", paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv")), intern = TRUE) + lastline <- system(paste0("tail -1 ", paste0(tmpdir, "/AMF_US-Akn_BASE_HH_6-5.csv")), intern = TRUE) + + # checking if first line of CSV has the sitename + expect_true(grepl(sitename, firstline[1])) + + # fourth and last row checked to contain non-alphabetical data since these are used to verify start and end dates + expect_false(grepl("[A-Za-z]", firstline[4])) + expect_false(grepl("[A-Za-z]", lastline[1])) + }) }) } diff --git a/modules/data.atmosphere/inst/integrationTests/test.download.CRUNCEP.R b/modules/data.atmosphere/inst/integrationTests/test.download.CRUNCEP.R index 8512b49f34d..cfad888104b 100644 --- a/modules/data.atmosphere/inst/integrationTests/test.download.CRUNCEP.R +++ b/modules/data.atmosphere/inst/integrationTests/test.download.CRUNCEP.R @@ -1,5 +1,6 @@ library(testthat) library(ncdf4) +library(PEcAn.DB) test_download_CRUNCEP <- function(start_date, end_date, lat.in, lon.in, method, maxErrors, sleep) { # putting logger to debug mode @@ -8,12 +9,12 @@ test_download_CRUNCEP <- function(start_date, end_date, lat.in, lon.in, method, PEcAn.logger::logger.setLevel("DEBUG") # mocking functions - mockery::stub(PEcAn.DB::convert_input, 'dbfile.input.check', data.frame()) - mockery::stub(PEcAn.DB::convert_input, 'db.query', data.frame(id = 1)) + mockery::stub(convert_input, 'dbfile.input.check', data.frame()) + mockery::stub(convert_input, 'db.query', data.frame(id = 1)) withr::with_dir(tempdir(), { tmpdir <- getwd() - PEcAn.DB::convert_input( + convert_input( input.id = NA, outfolder = tmpdir, formatname = NULL, diff --git a/modules/data.atmosphere/inst/integrationTests/test.download.ERA5.R b/modules/data.atmosphere/inst/integrationTests/test.download.ERA5.R index 90f37c8e0c0..bf84c31dd85 100644 --- a/modules/data.atmosphere/inst/integrationTests/test.download.ERA5.R +++ b/modules/data.atmosphere/inst/integrationTests/test.download.ERA5.R @@ -1,5 +1,6 @@ library(testthat) library(ncdf4) +library(PEcAn.DB) test_download_ERA5 <- function(start_date, end_date, lat.in, lon.in, product_types, reticulate_python) { # putting logger to debug mode @@ -9,16 +10,16 @@ test_download_ERA5 <- function(start_date, end_date, lat.in, lon.in, product_typ # mocking functions - mockery::stub(PEcAn.DB::convert_input, 'dbfile.input.check', data.frame()) - mockery::stub(PEcAn.DB::convert_input, 'db.query', data.frame(id = 1)) + mockery::stub(convert_input, 'dbfile.input.check', data.frame()) + mockery::stub(convert_input, 'db.query', data.frame(id = 1)) # additional mocks 
needed since download.ERA5 does not return data as other download functions - mockery::stub(PEcAn.DB::convert_input, 'length', 2) - mockery::stub(PEcAn.DB::convert_input, 'purrr::map_dfr', data.frame(missing = c(FALSE), empty = c(FALSE))) + mockery::stub(convert_input, 'length', 2) + mockery::stub(convert_input, 'purrr::map_dfr', data.frame(missing = c(FALSE), empty = c(FALSE))) withr::with_dir(tempdir(), { tmpdir <- getwd() - PEcAn.DB::convert_input( + convert_input( input.id = NA, outfolder = tmpdir, formatname = NULL, @@ -37,51 +38,51 @@ test_download_ERA5 <- function(start_date, end_date, lat.in, lon.in, product_typ product_types = product_types, reticulate_python = reticulate_python ) - }) - - test_that("All the required files are downloaded and stored at desired location", { - expect_true(file.exists(paste0(tmpdir, "/era5.2m_dewpoint_temperature.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.2m_temperature.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.10m_u_component_of_wind.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.10m_v_component_of_wind.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.surface_pressure.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.surface_solar_radiation_downwards.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.surface_thermal_radiation_downwards.nc"))) - expect_true(file.exists(paste0(tmpdir, "/era5.total_precipitation.nc"))) - }) + + test_that("All the required files are downloaded and stored at desired location", { + expect_true(file.exists(paste0(tmpdir, "/era5.2m_dewpoint_temperature.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.2m_temperature.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.10m_u_component_of_wind.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.10m_v_component_of_wind.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.surface_pressure.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.surface_solar_radiation_downwards.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.surface_thermal_radiation_downwards.nc"))) + expect_true(file.exists(paste0(tmpdir, "/era5.total_precipitation.nc"))) + }) - test_that("All ERA5 data files have the correct variable units", { - nc <- nc_open(paste0(tmpdir, "/era5.2m_dewpoint_temperature.nc")) - expect_equal(nc$var$d2m$units, "K") - nc_close(nc) + test_that("All ERA5 data files have the correct variable units", { + nc <- nc_open(paste0(tmpdir, "/era5.2m_dewpoint_temperature.nc")) + expect_equal(nc$var$d2m$units, "K") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.2m_temperature.nc")) - expect_equal(nc$var$t2m$units, "K") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.2m_temperature.nc")) + expect_equal(nc$var$t2m$units, "K") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.10m_u_component_of_wind.nc")) - expect_equal(nc$var$u10$units, "m s**-1") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.10m_u_component_of_wind.nc")) + expect_equal(nc$var$u10$units, "m s**-1") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.10m_v_component_of_wind.nc")) - expect_equal(nc$var$v10$units, "m s**-1") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.10m_v_component_of_wind.nc")) + expect_equal(nc$var$v10$units, "m s**-1") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.surface_pressure.nc")) - expect_equal(nc$var$sp$units, "Pa") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.surface_pressure.nc")) + expect_equal(nc$var$sp$units, "Pa") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, 
"/era5.surface_solar_radiation_downwards.nc")) - expect_equal(nc$var$ssrd$units, "J m**-2") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.surface_solar_radiation_downwards.nc")) + expect_equal(nc$var$ssrd$units, "J m**-2") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.surface_thermal_radiation_downwards.nc")) - expect_equal(nc$var$strd$units, "J m**-2") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.surface_thermal_radiation_downwards.nc")) + expect_equal(nc$var$strd$units, "J m**-2") + nc_close(nc) - nc <- nc_open(paste0(tmpdir, "/era5.total_precipitation.nc")) - expect_equal(nc$var$tp$units, "m") - nc_close(nc) + nc <- nc_open(paste0(tmpdir, "/era5.total_precipitation.nc")) + expect_equal(nc$var$tp$units, "m") + nc_close(nc) + }) }) } diff --git a/modules/data.atmosphere/tests/Rcheck_reference.log b/modules/data.atmosphere/tests/Rcheck_reference.log index 1f7a42f6634..be30baa7ac2 100644 --- a/modules/data.atmosphere/tests/Rcheck_reference.log +++ b/modules/data.atmosphere/tests/Rcheck_reference.log @@ -76,13 +76,13 @@ Found the following (possibly) invalid URLs: The Date field is over a month old. * checking package namespace information ... OK * checking package dependencies ... WARNING -Imports includes 39 non-default packages. +Imports includes 40 non-default packages. Importing from so many packages makes the package vulnerable to any of them becoming unavailable. Move as many as possible to Suggests and use conditionally. * checking package dependencies ... NOTE -Imports includes 39 non-default packages. +Imports includes 40 non-default packages. Importing from so many packages makes the package vulnerable to any of them becoming unavailable. Move as many as possible to Suggests and use conditionally. diff --git a/modules/data.land/DESCRIPTION b/modules/data.land/DESCRIPTION index 5ec23616b0f..eb187ac1667 100644 --- a/modules/data.land/DESCRIPTION +++ b/modules/data.land/DESCRIPTION @@ -61,7 +61,6 @@ Suggests: PEcAn.settings, redland, raster, - rgdal, RPostgreSQL, testthat (>= 1.0.2) License: BSD_3_clause + file LICENSE diff --git a/modules/data.land/R/gis.functions.R b/modules/data.land/R/gis.functions.R index 8f9434aea3f..0fa83941f88 100644 --- a/modules/data.land/R/gis.functions.R +++ b/modules/data.land/R/gis.functions.R @@ -68,14 +68,13 @@ shp2kml <- function(dir, ext, kmz = FALSE, proj4 = NULL, color = NULL, NameField # Read in shapefile(s) & get coordinates/projection info shp.file <- # readShapeSpatial(file.path(dir,i),verbose=TRUE) coordinates(test) <- ~X+Y - layers <- rgdal::ogrListLayers(file.path(dir, i)) - info <- rgdal::ogrInfo(file.path(dir, i), layers) + layers <- sf::st_layers(file.path(dir, i)) # shp.file <- readOGR(file.path(dir,i),layer=layers) # no need to read in file # Display vector info to the console print("") - print(paste0("Input layers: ", layers)) - print(paste0("Input projection info: ", info$p4s)) + print(paste0("Input layers: ", layers$name)) + print(paste0("Input projection info: ", layers$crs[[1]]$input)) print("") # Write out kml/kmz using plotKML package if (is.null(color)){ color <- 'grey70' } @@ -133,6 +132,7 @@ get.attributes <- function(file, coords) { #library(fields) #require(rgdal) + # note that OGR support is now provided by the sf and terra packages among others # print('NOT IMPLEMENTED YET') subset_layer(file,coords) } # get.attributes diff --git a/modules/data.mining/DESCRIPTION b/modules/data.mining/DESCRIPTION index 74176560466..7ac5389be56 100644 --- a/modules/data.mining/DESCRIPTION +++ 
b/modules/data.mining/DESCRIPTION @@ -1,6 +1,6 @@ Package: PEcAn.data.mining Type: Package -Title: PEcAn functions used for exploring model residuals and structures +Title: PEcAn Functions Used for Exploring Model Residuals and Structures Description: (Temporary description) PEcAn functions used for exploring model residuals and structures Version: 1.7.2 Date: 2021-10-04 @@ -21,4 +21,4 @@ LazyLoad: yes LazyData: FALSE Collate: Encoding: UTF-8 -RoxygenNote: 6.1.1 +RoxygenNote: 7.1.2 diff --git a/modules/data.mining/tests/Rcheck_reference.log b/modules/data.mining/tests/Rcheck_reference.log new file mode 100644 index 00000000000..7746a8b418c --- /dev/null +++ b/modules/data.mining/tests/Rcheck_reference.log @@ -0,0 +1,49 @@ +* using log directory ‘/tmp/Rtmpyv5R27/PEcAn.data.mining.Rcheck’ +* using R version 4.1.3 (2022-03-10) +* using platform: x86_64-pc-linux-gnu (64-bit) +* using session charset: UTF-8 +* using options ‘--no-manual --as-cran’ +* checking for file ‘PEcAn.data.mining/DESCRIPTION’ ... OK +* checking extension type ... Package +* this is package ‘PEcAn.data.mining’ version ‘1.7.2’ +* package encoding: UTF-8 +* checking package namespace information ... OK +* checking package dependencies ... OK +* checking if this is a source package ... OK +* checking if there is a namespace ... OK +* checking for executable files ... OK +* checking for hidden files and directories ... OK +* checking for portable file names ... OK +* checking for sufficient/correct file permissions ... OK +* checking serialization versions ... OK +* checking whether package ‘PEcAn.data.mining’ can be installed ... OK +* checking installed package size ... OK +* checking package directory ... OK +* checking for future file timestamps ... OK +* checking DESCRIPTION meta-information ... NOTE +Malformed Description field: should contain one or more complete sentences. +* checking top-level files ... OK +* checking for left-over files ... OK +* checking index information ... OK +* checking package subdirectories ... OK +* checking whether the package can be loaded ... OK +* checking whether the package can be loaded with stated dependencies ... OK +* checking whether the package can be unloaded cleanly ... OK +* checking whether the namespace can be loaded with stated dependencies ... OK +* checking whether the namespace can be unloaded cleanly ... OK +* checking loading without being on the library search path ... OK +* checking examples ... NONE +* checking for unstated dependencies in ‘tests’ ... OK +* checking tests ... + Running ‘testthat.R’ + OK +* checking for non-standard things in the check directory ... OK +* checking for detritus in the temp directory ... OK +* DONE + +Status: 1 NOTE +See + ‘/tmp/Rtmpyv5R27/PEcAn.data.mining.Rcheck/00check.log’ +for details. 
+ + diff --git a/modules/data.remote/DESCRIPTION b/modules/data.remote/DESCRIPTION index 68e3ee22e89..617f49b1338 100644 --- a/modules/data.remote/DESCRIPTION +++ b/modules/data.remote/DESCRIPTION @@ -20,7 +20,6 @@ Imports: PEcAn.utils, purrr, XML, - raster, sp, MODISTools (>= 1.1.0), reticulate, @@ -28,6 +27,7 @@ Imports: magrittr, PEcAn.remote, stringr (>= 1.1.0), + terra, doParallel, parallel, foreach @@ -36,7 +36,7 @@ Suggests: dplyr, ggplot2, lubridate, - rgdal, + raster, reshape, testthat (>= 1.0.2), tibble diff --git a/modules/data.remote/R/NLCD.R b/modules/data.remote/R/NLCD.R index dfc712f2e09..32517eff2ae 100644 --- a/modules/data.remote/R/NLCD.R +++ b/modules/data.remote/R/NLCD.R @@ -23,7 +23,7 @@ download.NLCD <- function(outdir, year = 2011, con = NULL) { ## before downloading, check if the file already exists on this host if (!is.null(con)) { - library(PEcAn.DB) + chk <- dbfile.check(type = "Input", id = input.id, con = con) if (nrow(chk) > 0) { machines <- db.query(paste("SELECT * from machines where id in (", @@ -68,11 +68,9 @@ download.NLCD <- function(outdir, year = 2011, con = NULL) { ##' ##' @description Based on codes from Christy Rollinson and from Max Joseph (http://mbjoseph.github.io/2014/11/08/nlcd.html) extract_NLCD <- function(buffer, coords, data_dir = NULL, con = NULL, year = 2011) { - library(raster) - require(rgdal) - + if (!is.null(con)) { - library(PEcAn.DB) + if (year == 2001) { input.id <- 1000000482 } else if (year == 2011) { @@ -104,21 +102,24 @@ extract_NLCD <- function(buffer, coords, data_dir = NULL, con = NULL, year = 201 print(paste("File not found:", filename)) return(NULL) } - nlcd <- raster(filename) + + # WARNING: the following extraction previously used raster and sp package functions + # this new implementation with terra functions has not been thoroughly tested + nlcd <- terra::rast(filename) # transform points - sites <- SpatialPoints(coords = coords, proj4string = CRS("+proj=longlat +datum=WGS84")) - sites <- spTransform(sites, crs(nlcd)) + sites <- terra::vect(coords, geom=c("long", "lat"), crs="+proj=longlat +datum=WGS84") + sites <- terra::buffer(sites, width=buffer) # extract - sum.raw <- table(extract(nlcd, sites, buffer = buffer)) + sum.raw <- table(terra::extract(nlcd, sites)) summ <- prop.table(sum.raw) - mydf <- data.frame(cover = names(summ), percent = as.vector(summ), count = as.vector(sum.raw)) + mydf <- data.frame(cover.name = colnames(summ), percent = as.vector(summ), count = as.vector(sum.raw)) + mydf <- mydf[mydf$count!=0,] - # land cover number to name conversions - cover.table <- nlcd@data@attributes[[1]] - cover.names <- cover.table[as.numeric(as.character(mydf$cover)) + 1, grep("Land", names(cover.table))] - mydf$cover.name <- cover.names + # land cover name to number conversions + nlcd_levels <- terra::levels(nlcd)[[1]] + mydf$cover <- nlcd_levels$value[nlcd_levels$`Land Cover Class` %in% mydf$cover.name] return(mydf) } # extract_NLCD diff --git a/modules/data.remote/R/download.thredds.R b/modules/data.remote/R/download.thredds.R index 09d44ac1337..04fc4b99923 100755 --- a/modules/data.remote/R/download.thredds.R +++ b/modules/data.remote/R/download.thredds.R @@ -82,7 +82,7 @@ download.thredds.AGB <- function(outdir = NULL, site_ids, run_parallel = FALSE, } else { ncores <- parallel::detectCores() -1 } - require(doParallel) + PEcAn.logger::logger.info(paste0("Running in parallel with: ", ncores)) cl = parallel::makeCluster(ncores) doParallel::registerDoParallel(cl) diff --git 
a/modules/data.remote/tests/Rcheck_reference.log b/modules/data.remote/tests/Rcheck_reference.log index 822a9411303..814dfe66e10 100644 --- a/modules/data.remote/tests/Rcheck_reference.log +++ b/modules/data.remote/tests/Rcheck_reference.log @@ -1,55 +1,12 @@ -* using log directory ‘/home/tanishq010/pecan/modules/PEcAn.data.remote.Rcheck’ -* using R version 4.2.1 (2022-06-23) +* using log directory ‘/tmp/Rtmpr2UgRH/PEcAn.data.remote.Rcheck’ +* using R version 4.1.3 (2022-03-10) * using platform: x86_64-pc-linux-gnu (64-bit) * using session charset: UTF-8 -* using options ‘--no-tests --no-manual --as-cran’ +* using options ‘--no-manual --as-cran’ * checking for file ‘PEcAn.data.remote/DESCRIPTION’ ... OK * checking extension type ... Package * this is package ‘PEcAn.data.remote’ version ‘1.7.2.9000’ * package encoding: UTF-8 -* checking CRAN incoming feasibility ... WARNING -Maintainer: ‘Bailey Morrison ’ - -New submission - -Version contains large components (1.7.2.9000) - -License components with restrictions and base license permitting such: - BSD_3_clause + file LICENSE -File 'LICENSE': - University of Illinois/NCSA Open Source License - - Copyright (c) 2012, University of Illinois, NCSA. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal with the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimers. - - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimers in the - documentation and/or other materials provided with the distribution. - - Neither the names of University of Illinois, NCSA, nor the names - of its contributors may be used to endorse or promote products - derived from this Software without specific prior written permission. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR - ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF - CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE. - -Strong dependencies not in mainstream repositories: - PEcAn.DB, PEcAn.utils, PEcAn.logger, PEcAn.remote - -The Date field is over a month old. * checking package namespace information ... OK * checking package dependencies ... OK * checking if this is a source package ... OK @@ -63,11 +20,7 @@ The Date field is over a month old. * checking installed package size ... OK * checking package directory ... OK * checking for future file timestamps ... OK -* checking DESCRIPTION meta-information ... NOTE -Author field differs from that derived from Authors@R - Author: ‘Mike Dietze, Bailey Morrison’ - Authors@R: ‘Mike Dietze [aut], Bailey Morrison [aut, cre], University of Illinois, NCSA [cph]’ - +* checking DESCRIPTION meta-information ... OK * checking top-level files ... OK * checking for left-over files ... 
OK * checking index information ... OK @@ -80,49 +33,22 @@ Author field differs from that derived from Authors@R * checking whether the namespace can be loaded with stated dependencies ... OK * checking whether the namespace can be unloaded cleanly ... OK * checking loading without being on the library search path ... OK -* checking use of S3 registration ... OK * checking dependencies in R code ... WARNING -'library' or 'require' calls not declared from: - ‘PEcAn.DB’ ‘doParallel’ ‘raster’ 'library' or 'require' calls in package code: ‘PEcAn.DB’ ‘doParallel’ ‘raster’ ‘rgdal’ Please use :: or requireNamespace() instead. See section 'Suggested packages' in the 'Writing R Extensions' manual. -Namespace in Imports field not imported from: ‘stringr’ - All declared Imports should be used. +Missing or unexported objects: + ‘PEcAn.utils::download_file’ ‘PEcAn.utils::need_packages’ * checking S3 generic/method consistency ... OK * checking replacement functions ... OK * checking foreign function calls ... OK * checking R code for possible problems ... NOTE -call_MODIS: no visible global function definition for ‘write.csv’ download.LandTrendr.AGB: no visible binding for global variable ‘k’ -download.NLCD: no visible global function definition for ‘dbfile.check’ -download.NLCD: no visible global function definition for ‘db.query’ -download.NLCD: no visible global function definition for - ‘dbfile.insert’ -download.thredds.AGB : get_data: no visible global function definition - for ‘write.csv’ -download.thredds.AGB: no visible global function definition for - ‘foreach’ -download.thredds.AGB: no visible global function definition for - ‘stopCluster’ -extract_NLCD: no visible global function definition for ‘dbfile.check’ -extract_NLCD: no visible global function definition for ‘db.query’ -extract_NLCD: no visible global function definition for ‘raster’ -extract_NLCD: no visible global function definition for ‘SpatialPoints’ -extract_NLCD: no visible global function definition for ‘CRS’ -extract_NLCD: no visible global function definition for ‘spTransform’ -extract_NLCD: no visible global function definition for ‘crs’ -extract_NLCD: no visible global function definition for ‘extract’ grid2netcdf: no visible binding for global variable ‘years’ grid2netcdf: no visible binding for global variable ‘yieldarray’ Undefined global functions or variables: - CRS SpatialPoints crs db.query dbfile.check dbfile.insert - download.file extract foreach k raster spTransform stopCluster - write.csv -Consider adding - importFrom("utils", "download.file", "write.csv") -to your NAMESPACE file. + k years yieldarray * checking Rd files ... OK * checking Rd metadata ... OK * checking Rd line widths ... OK @@ -148,4 +74,9 @@ Extensions’ manual. * checking for detritus in the temp directory ... OK * DONE -Status: 3 WARNINGs, 2 NOTEs +Status: 2 WARNINGs, 1 NOTE +See + ‘/tmp/Rtmpr2UgRH/PEcAn.data.remote.Rcheck/00check.log’ +for details. 
+ + diff --git a/scripts/check_with_errors.R b/scripts/check_with_errors.R index 4a4e8701658..f90a75532f4 100755 --- a/scripts/check_with_errors.R +++ b/scripts/check_with_errors.R @@ -7,6 +7,8 @@ log_level <- Sys.getenv("LOGLEVEL", unset = NA) die_level <- Sys.getenv("DIELEVEL", unset = NA) redocument <- as.logical(Sys.getenv("REBUILD_DOCS", unset = NA)) runtests <- as.logical(Sys.getenv("RUN_TESTS", unset = TRUE)) +resave <- as.logical(Sys.getenv("RESAVE_CHECKS", unset = FALSE)) +if (resave) die_level <- "never" old_file <- file.path(pkg, "tests", "Rcheck_reference.log") if (file.exists(old_file)) { @@ -71,7 +73,7 @@ if (log_notes && n_notes > 0) { # such that it's not yet practical to break the build on every warning. # Cleaning this up is a long-term goal, but will take time. # Meanwhile, we compare against a cached historic check output to enforce that -# no *new* warnings are added. As historic warnings are removed, we will update +# no *new* warnings are added. As historic warnings are removed, we update # the cached results to ensure they stay gone. # # To compare checks, we take a two-level approach: @@ -83,16 +85,16 @@ if (log_notes && n_notes > 0) { ### # To update reference files after fixing an old warning: # * Run check_with_errors.R to be sure the check is currently passing -# * Delete the file you want to update -# * Uncomment this section -# * run `DIELEVEL=never Rscript scripts/check_with_errors.R path/to/package` -# * recomment this section -# * Commit updated file -# if (!file.exists(old_file)) { -# cat("No reference check file found. Saving current results as the new standard\n") -# cat(chk$stdout, file = old_file) -# quit("no") -# } +# * run `RESAVE_CHECKS=true Rscript scripts/check_with_errors.R path/to/package` +# * Commit updated /tests/Rcheck_reference.log file +if (resave) { + cat("Saving current check results as the new standard\n") + if (file.exists(old_file)) { + cat("**Overwriting** existing saved check output\n") + } + cat(chk$stdout, file = old_file) + quit("no") +} ### # everything beyond this point is comparing to old version
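
Note on the GEFS GRIB change: the core functional edit in GEFS_helper_functions.R swaps rgdal::readGDAL() for terra::rast() when parsing NOAA GEFS GRIB files. Below is a minimal sketch of that access pattern, not the PEcAn implementation itself; the file name and site coordinates are placeholders, and the long band labels used in the patch (e.g. "SFC=Ground or water surface; Pressure [Pa]") come from the GDAL GRIB driver and can vary between GDAL builds, so inspect names() rather than hard-coding them.

    library(terra)

    grib_file <- "gep01.t00z.pgrb2a.0p50.f006.grib"    # hypothetical GEFS GRIB file
    if (file.exists(grib_file)) {
      grib_data <- terra::rast(grib_file)                       # one layer per GRIB band
      grib_df   <- terra::as.data.frame(grib_data, xy = TRUE)   # cell coordinates + band values

      lon <- 287.5; lat <- 42.5                                 # placeholder site (degrees)
      index <- which(grib_df$y == lat & grib_df$x == lon)

      print(names(grib_df))                                     # band labels reported by the driver
      if (length(index) == 1) print(grib_df[index, ])
    }

An alternative that avoids exact floating-point matching of grid coordinates is terra::extract(grib_data, cbind(lon, lat)), which returns the band values for the cell containing the point.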
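
Note on the maptools removal: maptools has been archived on CRAN, and its sun-position helpers now live in the suntools package with the same signatures, which is why met_temporal_downscale.Gaussian_ensemble.R only needs the namespace swapped. A self-contained sketch of the call pattern, with placeholder coordinates and dates:

    library(suntools)

    lon.lat  <- matrix(c(-72.17, 42.54), nrow = 1)   # placeholder lon/lat; sp/sf point objects also accepted
    sequence <- seq(as.POSIXct("2023-06-01 12:00:00", tz = "UTC"), length.out = 3, by = "days")

    sunrise    <- suntools::sunriset(lon.lat, sequence, direction = "sunrise", POSIXct.out = TRUE)
    sunset     <- suntools::sunriset(lon.lat, sequence, direction = "sunset",  POSIXct.out = TRUE)
    solar_noon <- suntools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE)

    # same summary shape the downscaling code builds
    data.frame(date      = as.Date(sunrise$time),
               sunrise   = format(sunrise$time, "%H%M"),
               solarnoon = format(solar_noon$time, "%H%M"),
               sunset    = format(sunset$time, "%H%M"))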
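
Note on the rgdal removal in gis.functions.R: rgdal::ogrListLayers() and ogrInfo() are replaced by a single sf::st_layers() call, which returns layer names and CRS information together. A short sketch mirroring the fields the patch reads, with a placeholder shapefile path:

    library(sf)

    layers <- sf::st_layers("sites.shp")    # hypothetical shapefile

    print(paste0("Input layers: ", layers$name))
    print(paste0("Input projection info: ", layers$crs[[1]]$input))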
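
Note on the NLCD change: extract_NLCD() in data.remote moves from raster/sp/rgdal to terra, and the patch comment itself warns that the new code has not been thoroughly tested. The sketch below shows the general terra pattern (points -> buffer -> extract -> tabulate) rather than the exact PEcAn implementation; the raster path, coordinates, and buffer width are placeholders, and the reprojection step is included only as common practice when point and raster CRSs differ.

    library(terra)

    nlcd   <- terra::rast("nlcd_2011_landcover.img")   # hypothetical local NLCD raster
    coords <- data.frame(long = -72.17, lat = 42.54)   # placeholder site

    sites <- terra::vect(coords, geom = c("long", "lat"), crs = "+proj=longlat +datum=WGS84")
    sites <- terra::project(sites, terra::crs(nlcd))   # put the points in the raster's CRS
    sites <- terra::buffer(sites, width = 500)         # 500 m radius buffer (assumed width)

    vals    <- terra::extract(nlcd, sites)[, 2]        # drop the ID column, keep the cover classes
    sum.raw <- table(vals)
    summ    <- prop.table(sum.raw)

    data.frame(cover   = names(summ),
               percent = as.vector(summ),
               count   = as.vector(sum.raw))

For a categorical NLCD raster the extracted values are the class labels; the patch maps between class names and numeric codes through the attribute table returned by terra::levels(nlcd)[[1]].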