diff --git a/.gitignore b/.gitignore index be3c08170..85fea88c6 100644 --- a/.gitignore +++ b/.gitignore @@ -46,4 +46,5 @@ pkgdown/* /Meta/ # rosm cache +rosm.cache/ rosm.cache diff --git a/CRAN-SUBMISSION b/CRAN-SUBMISSION deleted file mode 100644 index 4c2f0c30b..000000000 --- a/CRAN-SUBMISSION +++ /dev/null @@ -1,3 +0,0 @@ -Version: 0.7.3 -Date: 2023-10-20 02:23:58 UTC -SHA: 71b8994af389b81aba24d2b3e5afbb97386e36c5 diff --git a/DESCRIPTION b/DESCRIPTION index c00e3372c..778a279b8 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,12 +1,12 @@ Package: bioRad Title: Biological Analysis and Visualization of Weather Radar Data -Version: 0.7.3.9000 +Version: 0.8.1 Description: Extract, visualize and summarize aerial movements of birds and insects from weather radar data. See Dokter, A. M. et al. (2018) "bioRad: biological analysis and visualization of weather radar data" for a software paper describing package and methodologies. Authors@R: c( - person("Adriaan M.", "Dokter", role = c("aut", "cre"), email = "amd427@cornell.edu", comment = c(ORCID = "0000-0001-6573-066X")), + person("Adriaan M.", "Dokter", role = c("aut", "cre"), email = "biorad@cornell.edu", comment = c(ORCID = "0000-0001-6573-066X")), person("Peter", "Desmet", role = "aut", email = "peter.desmet@inbo.be", comment = c(ORCID = "0000-0002-8442-8025")), person("Bart", "Kranstauber", role = "aut", email = "b.kranstauber@uva.nl", comment = c(ORCID = "0000-0001-8303-780X")), person("Cecilia", "Nilsson", role = "aut", email = "cecilia.nilsson709@gmail.com", comment = c(ORCID = "0000-0001-8957-4411")), @@ -32,7 +32,6 @@ Imports: curl, dplyr (>= 1.1.0), fields, - frictionless, ggplot2, glue, graphics, @@ -44,10 +43,13 @@ Imports: readr, rhdf5, rlang, + sf, sp, stats, + stringr, suntools, tidyr, + tidyselect, utils, viridis, viridisLite @@ -58,14 +60,12 @@ Suggests: prettymapr, rmarkdown, rosm, - sf, testthat (>= 3.0.0), - tidyselect, vdiffr, vol2birdR LazyData: true Encoding: UTF-8 VignetteBuilder: knitr -RoxygenNote: 7.2.3 +RoxygenNote: 7.3.2 Roxygen: list(markdown = TRUE) Config/testthat/edition: 3 diff --git a/NAMESPACE b/NAMESPACE index 84377c781..b3d6821cc 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -150,4 +150,3 @@ export(write_pvolfile) importFrom(dplyr,"%>%") importFrom(dplyr,.data) importFrom(methods,as) -importFrom(stats,na.omit) diff --git a/NEWS.md b/NEWS.md index 694f446ac..6f5f00e72 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,17 +1,39 @@ -# bioRad 0.7.3.9000 +# bioRad 0.8.1 + +## Bugfixes + +* The `dbz_all` field in VPTS CSV files is now correctly mapped to the `DBZH` field + +* Non-standard data fields are now retained in vpts objects produced with `as.vpts()` + +# bioRad 0.8.0 + +## New features + +* The ENRAM VPTS data exchange format schema is now included as package data in `vpts_schema.rda` + +* VPTS files can now be validated with `validate_vpts()`, which uses the schema to check fields for min/max constraint violations, regular expression patterns and datetime formatting * speed up `integrate_to_ppi()` and `project_as_ppi()` by using native `sf` functions (#669) +* support for tidyverse select method for polar volume and polar scan objects (#668,#460) + ## Bugfixes -* support for tidyverse select method for polar volume and polar scan objects (#668,#460) +* Corrected the default refractive index value used in conversion of linear reflectivity (eta) to logarithmic reflectivity (dBZ). The effect is a 7% increase in animal densities in output of functions `integrate_to_ppi()` and `read_cajun()` only (#642).
-* Updated the default refractive index value used in conversion of linear reflectivity (eta) to logarithmic reflectivity (dBZ) (#642). The effect is a 7% increase in animal densities in output of functions `integrate_to_ppi()` and `read_cajun()` only. +* Fixed the handling of empty numeric vectors when plotting clutter maps (#655) -# bioRad 0.7.3 +* Fixed warning when reading VPTS CSV containing multiple values in `lat`, `lon`, `rcs` or `sd_vvp_threshold` (#651) -## New features +* Fixed enabling/disabling of `single_pol` flag in `calculate_vp()` (#646) + +* Updated the S3 source bucket of `download_vpfiles()` to https://aloftdata.s3-eu-west-1.amazonaws.com (#648) + +* Fixed an error in the idw method of `composite_ppi()` that emerged with the evolution of dependency package sp / deprecation of rgdal (#666) + +# bioRad 0.7.3 * Replaced the `ggmap` package with `ggspatial` for map visualizations. This change was made as `ggmap` no longer provides reliable open-source basemaps without the necessity to register for an API key (#638). @@ -76,7 +98,6 @@ bioRad 0.7 includes a major backend overhaul that deprecates the use of Docker. ## Deprecations - * Argument `local_install` in `calculate_vp()` and `apply_mistnet()` is now deprecated. * Functions `check_docker()` and `update_docker()` have been deprecated. diff --git a/R/apply_mistnet.R b/R/apply_mistnet.R index 46337ca26..0589c9e88 100644 --- a/R/apply_mistnet.R +++ b/R/apply_mistnet.R @@ -85,6 +85,7 @@ #' @examples #' \donttest{ #' # make sure you have installed the MistNet libraries and model, using: +#' if (requireNamespace("vol2birdR", quietly = TRUE)){ #' if(!vol2birdR::mistnet_exists()){ #' vol2birdR::install_mistnet() #' vol2birdR::install_mistnet_model() @@ -125,6 +126,7 @@ #' # Remove file #' file.remove(tempfile) #' } +#' } apply_mistnet <- function(file, pvolfile_out, verbose = FALSE, mount = dirname(file), load = TRUE, mistnet_elevations = c(0.5, 1.5, 2.5, 3.5, 4.5), diff --git a/R/as.vpts.R b/R/as.vpts.R index 771c980c0..e21b6c942 100644 --- a/R/as.vpts.R +++ b/R/as.vpts.R @@ -1,4 +1,3 @@ - #' Convert a dataframe into a vpts object #' #' @param data a dataframe created from a VPTS CSV file #' @@ -18,6 +17,8 @@ as.vpts <- function(data) { dplyr::rename(DBZH = "dbz_all") } + validate_vpts(data) + height <- datetime <- source_file <- radar <- NULL # Throw error if nrows per height are not identical @@ -35,13 +36,6 @@ msg = "`data` must contain data of a single radar."
) - if (!exists("cached_schema")) { - # Load the schema from the data directory and cache it - cached_schema <- jsonlite::fromJSON(system.file("extdata", "vpts-csv-table-schema.json", package = "bioRad"), - simplifyDataFrame = FALSE, simplifyVector = TRUE - ) - } - data <- dplyr::mutate( data, radar = as.factor(radar), @@ -71,37 +65,55 @@ as.vpts <- function(data) { regular <- FALSE } -# Get attributes -radar_height <- data[["radar_height"]][1] -interval <- unique(heights[-1] - heights[-length(heights)]) -wavelength <- data[["radar_wavelength"]][1] - -# Check and warn for multiple values of specific attributes and return only the first values of those attributes -check_multivalue_attributes <- function(data) { - attributes <- c("radar_longitude", "radar_latitude", "rcs", "sd_vvp_threshold") - first_values <- list() - for (attr in attributes) { - if (length(unique(data[[attr]])) > 1) { - warning(paste0("multiple `", attr, "` values found, storing only first (", - as.character(data[[attr]][1]), ") as the functional attribute.")) + # Get attributes + radar_height <- data[["radar_height"]][1] + interval <- unique(heights[-1] - heights[-length(heights)]) + wavelength <- data[["radar_wavelength"]][1] + + # Check and warn for multiple values of specific attributes and return only the first values of those attributes + check_multivalue_attributes <- function(data) { + attributes <- c("radar_longitude", "radar_latitude", "rcs", "sd_vvp_threshold") + first_values <- list() + for (attr in attributes) { + if (length(unique(data[[attr]])) > 1) { + warning(paste0("multiple ", as.character(substitute(attr))," values found, storing only first (", + as.character(data[[attr]][1]), ") as the functional attribute.")) + } + first_values[[attr]] <- data[[attr]][1] } - first_values[[attr]] <- data[[attr]][1] + return(first_values) } - return(first_values) -} first_values <- check_multivalue_attributes(data) - + # Directly extract and assign values from the list lon <- first_values$radar_longitude lat <- first_values$radar_latitude rcs <- first_values$rcs sd_vvp_threshold <- first_values$sd_vvp_threshold - - # Convert dataframe - maskvars <- c("radar", "rcs", "sd_vvp_threshold", "radar_latitude", "radar_longitude", "radar_height", "radar_wavelength", "source_file", "datetime", "height", "sunrise", "sunset", "day") - data <- df_to_mat_list(data, maskvars, cached_schema) + # column names not to store in the vpts$data slot + radvars_exclude <- c("radar","datetime","height","rcs","sd_vvp_threshold","radar_latitude","radar_longitude","radar_height","radar_wavelength") + # radvars to include in vpts$data slot + radvars <- names(data)[!names(data) %in% radvars_exclude] + + # calculate number of vertical profiles present + n_vp <- nrow(data)/length(heights) + + # cast data.frame to list of matrices + data_output <- lapply(radvars, function(x) matrix(data[[x]],ncol=n_vp)) + names(data_output)=radvars + + # List of vectors to check + vectors_to_check <- list(heights = heights, interval = interval, radar_height = radar_height, lon = lon, lat = lat) + + # Identify empty vectors + empty_vectors <- names(vectors_to_check)[sapply(vectors_to_check, function(v) length(v) == 0)] + + # Stop execution if any empty vectors are found + if (length(empty_vectors) > 0) { + stop("Empty vectors detected: ", paste(empty_vectors, collapse=", ")) + } # Create vpts object output <- list( @@ -110,7 +122,7 @@ check_multivalue_attributes <- function(data) { height = heights, daterange = c(min(datetime), max(datetime)), timesteps = difftimes, - 
data = data, + data = data_output, attributes = list( where = data.frame( interval = as.integer(interval), diff --git a/R/calculate_vp.R b/R/calculate_vp.R index d95847093..50272263b 100644 --- a/R/calculate_vp.R +++ b/R/calculate_vp.R @@ -173,18 +173,22 @@ #' # Locate and read the polar volume example file #' pvolfile_source <- system.file("extdata", "volume.h5", package = "bioRad") #' -#' # Copy the file to a home directory with read/write permissions +#' # Copy the file to a temporary directory with read/write permissions #' pvolfile <- paste0(tempdir(),"/volume.h5") #' file.copy(pvolfile_source, pvolfile) #' #' # Calculate the profile +#' if (requireNamespace("vol2birdR", quietly = TRUE)) { #' vp <- calculate_vp(pvolfile) -#' +#' #' # Get summary info #' vp #' #' # Clean up #' file.remove(pvolfile) +#' +#' } +#' calculate_vp <- function(file, vpfile = "", pvolfile_out = "", autoconf = FALSE, verbose = FALSE, warnings = TRUE, mount, sd_vvp_threshold, diff --git a/R/composite_ppi.R b/R/composite_ppi.R index a144f0e66..0c018e71f 100644 --- a/R/composite_ppi.R +++ b/R/composite_ppi.R @@ -75,8 +75,10 @@ #' composite <- composite_ppi(ppis, method = "max", res=1000) #' #' # Plot the calculated max product on the basemap +#' if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { #' map(composite) #' } +#' } composite_ppi <- function(x, param = "all", diff --git a/R/data.R b/R/data.R index e32aed71a..372272108 100644 --- a/R/data.R +++ b/R/data.R @@ -70,3 +70,14 @@ # example_vpts$attributes$where$lat <- 42.2 # example_vpts$attributes$where$lon <- -75.98 # save(example_vpts, file = "data/example_vpts.rda", compress = "xz") + +#' ENRAM-defined VPTS schema +#' +#' A dataset containing the schema definitions for VPTS data validation. +#' @rdname vpts_schema +#' @keywords internal +#' @return A list object containing the data model for the standard VPTS exchange format. +#' @examples +#' # Reload vpts_schema from package (e.g. in case it was altered) +#' data(vpts_schema) +"vpts_schema" diff --git a/R/download_pvolfiles.R b/R/download_pvolfiles.R index 911a8d7bf..75073a946 100644 --- a/R/download_pvolfiles.R +++ b/R/download_pvolfiles.R @@ -21,6 +21,7 @@ #' a message and a progress bar in the console indicating the download status. 
#' @examples #' # create temporary directory +#' if (requireNamespace("aws.s3", quietly = TRUE)) { #' temp_dir <- paste0(tempdir(),"/bioRad_tmp_files") #' dir.create(temp_dir) #' download_pvolfiles( @@ -32,6 +33,7 @@ #' ) #' # Clean up #' unlink(temp_dir, recursive = TRUE) +#' } download_pvolfiles <- function(date_min, date_max, radar, directory = ".", overwrite = FALSE, bucket = "noaa-nexrad-level2") { diff --git a/R/download_vpfiles.R b/R/download_vpfiles.R index 4535591ed..56fc54dc4 100644 --- a/R/download_vpfiles.R +++ b/R/download_vpfiles.R @@ -21,6 +21,7 @@ #' @export #' #' @seealso +#' * [read_vpts()] #' * [select_vpfiles()] #' * [read_vpfiles()] #' diff --git a/R/hooks.R b/R/hooks.R index 7d209b405..c8b5ca137 100644 --- a/R/hooks.R +++ b/R/hooks.R @@ -1,9 +1,10 @@ .onLoad <- function(libname, pkgname) { register_all_s3_methods() # dynamically registers non-imported pkgs (tidyverse) # nocov } + .onAttach <- function(libname, pkgname) { packageStartupMessage(paste("Welcome to", pkgname, "version", utils::packageVersion(pkgname))) - if (rlang::is_installed("vol2birdR")) { + if (requireNamespace("vol2birdR", quietly = TRUE)) { packageStartupMessage(paste("using vol2birdR version ", utils::packageVersion("vol2birdR"), ifelse(vol2birdR::mistnet_exists(), " (MistNet installed)", " (MistNet not installed)"), sep = "")) } } diff --git a/R/integrate_to_ppi.R b/R/integrate_to_ppi.R index f7af4a160..596491623 100644 --- a/R/integrate_to_ppi.R +++ b/R/integrate_to_ppi.R @@ -119,7 +119,9 @@ #' plot(ppi, param = "VID", zlim = c(0, 200)) #' #' # Download a basemap and map the ppi +#' if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { #' map(ppi) +#' } #' #' # The ppi can also be projected on a user-defined raster, as follows: #' diff --git a/R/list_vpts_aloft.R b/R/list_vpts_aloft.R index 276c6d5c2..2e39a1650 100644 --- a/R/list_vpts_aloft.R +++ b/R/list_vpts_aloft.R @@ -16,7 +16,10 @@ #' @export #' #' @examples -#' list_vpts_aloft(radars = "bejab") +#' \donttest{ +#' if (requireNamespace("aws.s3", quietly = TRUE)) { +#' list_vpts_aloft(radars = "bejab", date_min='2018-10-01', date_max = '2018-12-31') +#' }} list_vpts_aloft <- function(date_min = NULL, date_max = NULL, radars = NULL, diff --git a/R/map.R b/R/map.R index d85f2803f..1fc1c2112 100644 --- a/R/map.R +++ b/R/map.R @@ -47,6 +47,7 @@ #' # Project a scan as a ppi #' ppi <- project_as_ppi(example_scan) #' \donttest{ +#' if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { #' # Choose a basemap #' basemap <- rosm::osm.types()[1] #' @@ -71,6 +72,7 @@ #' # Crop the map #' map(ppi, map = basemap, xlim = c(12.4, 13.2), ylim = c(56, 56.5)) #' } +#' } map <- function(x, ...) 
{ UseMethod("map", x) } @@ -108,7 +110,7 @@ map.ppi <- function(x, map="cartolight", param, alpha = 0.7, xlim, ylim, zlim = } # check that suggested dependencies are presetn - rlang::check_installed(c("ggspatial","prettymapr"),'to map ppi\'s') + rlang::check_installed(c("ggspatial","prettymapr", "rosm"),'to map ppi\'s') assert_valid_basemap <- function(basemap) { valid_types <- rosm::osm.types() @@ -160,7 +162,7 @@ map.ppi <- function(x, map="cartolight", param, alpha = 0.7, xlim, ylim, zlim = # convert to google earth mercator projection data <- suppressWarnings( - as.data.frame(sp::spTransform(as(data,"SpatialPointsDataFrame"), sp::CRS("+init=epsg:3857"))) + as.data.frame(sp::spTransform(methods::as(data,"SpatialPointsDataFrame"), sp::CRS("+init=epsg:3857"))) ) # bring z-values within plotting range index <- which(data$z < zlim[1]) diff --git a/R/nexrad_odim.R b/R/nexrad_odim.R index c69c3952c..c841b0ef8 100644 --- a/R/nexrad_odim.R +++ b/R/nexrad_odim.R @@ -29,13 +29,16 @@ #' #' new_path = file.path(tempdir(), "KBGM_example.h5") #' +#' if (requireNamespace("vol2birdR", quietly = TRUE)) { #' nexrad_to_odim(path, new_path) #' #' # verify that we have generated a polar volume in ODIM HDF5 format #' get_odim_object_type(new_path) #' #' # clean up -#' file.remove(path, new_path) +#' file.remove(new_path) +#' } +#' file.remove(path) #' } nexrad_to_odim <- function(pvolfile_nexrad, pvolfile_odim, verbose = FALSE) { assertthat::assert_that(dir.exists(dirname(pvolfile_odim)),msg=paste("output directory", dirname(pvolfile_odim), "not found")) diff --git a/R/read_pvolfile.R b/R/read_pvolfile.R index 5ad2e0adf..7823dd91f 100644 --- a/R/read_pvolfile.R +++ b/R/read_pvolfile.R @@ -53,7 +53,7 @@ #' #' # load the file: #' example_pvol <- read_pvolfile(pvolfile) -#' +#' #' # print summary info for the loaded polar volume: #' example_pvol #' diff --git a/R/read_vpts.R b/R/read_vpts.R index a96e8383e..ec3e1e061 100644 --- a/R/read_vpts.R +++ b/R/read_vpts.R @@ -91,32 +91,23 @@ read_vpts <- function(files, data_frame = FALSE, ...) { #' @noRd read_vpts_csv <- function(files, data_frame = FALSE) { - if (!exists("cached_schema")) { - # Read the schema from the URL and cache it - cached_schema <- jsonlite::fromJSON(system.file("extdata", "vpts-csv-table-schema.json", package = "bioRad"), simplifyDataFrame = FALSE, simplifyVector = TRUE) - cached_schema$missingValues <- c("", "NA") - } - - # Create Frictionless Data Package - package <- frictionless::create_package() - # Add resource to the package - package <- frictionless::add_resource( - package, - "vpts", - data = files, - schema = cached_schema - ) + #suppressMessages( + data <- readr::read_csv(files, show_col_types = FALSE) + #, + #col_types = readr::cols( + # .default = readr::col_guess(), + # `...1` = readr::col_skip() # Skip unnamed columns + # )) + #) - # Read resource (compares data with schema and binds rows of all files) - data <- frictionless::read_resource(package, "vpts") + #Validate the data + validate_vpts(data) # Convert data source_file <- datetime <- radar <- NULL data <- dplyr::mutate( data, - radar = as.factor(radar), - source_file = as.factor(source_file), datetime = as.POSIXct(datetime, format = "%Y-%m-%dT%H:%M:%SZ", tz = "UTC") ) diff --git a/R/select.R b/R/select.R index 150151a58..dc21c4c08 100644 --- a/R/select.R +++ b/R/select.R @@ -18,6 +18,7 @@ #' select(get_scan(example_pvol, 2.5), VRADH:ZDR) #' } # generalizations for the dplyr verb `select` to `scan` and `pvol` objects +#' @keywords internal select.scan <- function(.data, ...) 
{ if (!requireNamespace("dplyr", quietly = TRUE)) { stop("package dplyr required, please install it first") # nocov @@ -34,6 +35,7 @@ select.scan <- function(.data, ...) { .data } +#' @keywords internal select.pvol <- function(.data, ...) { .data$scans <- lapply(.data$scans, select.scan, ...) .data @@ -41,8 +43,10 @@ select.pvol <- function(.data, ...) { register_all_s3_methods <- function() { # nocov start - register_s3_method("dplyr", "select", "scan") - register_s3_method("dplyr", "select", "pvol") + if (!(!requireNamespace("dplyr", quietly = TRUE) | !requireNamespace("rlang", quietly = TRUE) | !requireNamespace("tidyselect", quietly = TRUE))) { + register_s3_method("dplyr", "select", "scan") + register_s3_method("dplyr", "select", "pvol") + } # nocov end } diff --git a/R/sysdata.rda b/R/sysdata.rda new file mode 100644 index 000000000..800548d8c Binary files /dev/null and b/R/sysdata.rda differ diff --git a/R/utils.R b/R/utils.R index 33113c644..fe9d52beb 100644 --- a/R/utils.R +++ b/R/utils.R @@ -8,10 +8,54 @@ #' #' @return A matrix of the same dimension as `x`, with `TRUE`/`FALSE` values for #' whether each cell in the original data frame is a number or not. -#' @importFrom stats na.omit #' @keywords internal is.nan.data.frame <- function(x) do.call(cbind, lapply(x, is.nan)) +#' Skip test if no aws.s3 +#' +#' Some functions require package aws.s3 +#' This helper function allows to skip a test if aws.s3 is not available +#' Inspired by . +#' @return Invisibly returns TRUE if aws.s3 is available, otherwise skips the test with a message "Package aws.s3 not installed". +#' @keywords internal +skip_if_no_aws.s3 <- function() { + if (requireNamespace("aws.s3", quietly = TRUE)) { + return(invisible(TRUE)) + } + testthat::skip("Package aws.s3 not installed") +} + +#' Skip test if missing dependencies for mapping +#' +#' Function map depends on several spatial dependencies (ggspatial, prettymapr, rosm). +#' This helper function allows to skip a test if these dependencies are not available +#' Inspired by . +#' @return Invisibly returns TRUE if dependencies available, otherwise skips the test with a message "map() dependencies (ggspatial, prettymapr, rosm) not installed". +#' @keywords internal +skip_if_no_mapping <- function() { + if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))){ + return(invisible(TRUE)) + } + testthat::skip("map() dependencies (ggspatial, prettymapr, rosm) not installed") +} + + +#' Skip test if no tidyselect +#' +#' dplyr select method require package tidyselect +#' This helper function allows to skip a test if tidyselect is not available +#' Inspired by . +#' @return Invisibly returns TRUE if tidyselect is available, otherwise skips the test with a message "Package tidyselect not installed". +#' @keywords internal +skip_if_no_tidyselect <- function() { + if (requireNamespace("tidyselect", quietly = TRUE)) { + return(invisible(TRUE)) + } + testthat::skip("Package tidyselect not installed") +} + + + #' Skip test if no mistnet #' #' Some functions require MistNet to be enabled in package vol2birdR. @@ -20,7 +64,7 @@ is.nan.data.frame <- function(x) do.call(cbind, lapply(x, is.nan)) #' @return Invisibly returns TRUE if MistNet is available, otherwise skips the test with a message "No MistNet". 
#' @keywords internal skip_if_no_mistnet <- function() { - if (rlang::is_installed("vol2birdR")) { + if (requireNamespace("vol2birdR", quietly = TRUE)) { if (vol2birdR::mistnet_exists()) { return(invisible(TRUE)) } @@ -37,7 +81,7 @@ skip_if_no_mistnet <- function() { #' a message "Package vol2birdR not installed". #' @keywords internal skip_if_no_vol2birdR <- function() { - if (rlang::is_installed("vol2birdR")) { + if (requireNamespace("vol2birdR", quietly = TRUE)) { return(invisible(TRUE)) } testthat::skip("Package vol2birdR not installed") @@ -61,7 +105,7 @@ check_radar_codes <- function(radars) { } else { # Load the JSON data from the new URL radars.json <- jsonlite::fromJSON("https://raw.githubusercontent.com/enram/aloftdata.eu/main/_data/OPERA_RADARS_DB.json") - radar_codes = na.omit(radars.json$odimcode) + radar_codes = stats::na.omit(radars.json$odimcode) wrong_codes <- radars[!(radars %in% radar_codes)] if (length(wrong_codes) > 0) { stop( @@ -92,7 +136,7 @@ check_date_format <- function(date, format) { } -#' A wrapper for [spTransform()]. +#' A wrapper for [sp::spTransform()]. #' Converts geographic (WGS84) coordinates to a specified projection #' #' @param lon Longitude @@ -113,7 +157,7 @@ wgs_to_proj <- function(lon, lat, proj4string) { return(res) } -#' A wrapper for [spTransform()]. +#' A wrapper for [sp::spTransform()]. #' Converts projected coordinates to geographic (WGS84) coordinates. #' #' @param x The x-coordinate in the projected system. @@ -242,69 +286,3 @@ guess_file_type <- function(file_path, n_lines = 5) { remainder_is_zero <- function(number, divisor) { return(number %% divisor == 0) } - -# Recursive function to extract variable names from frictionless schema - -#' @param lst the "fields" list of a frictionless schema -#' @returns a character vector of variable names in the order of a VPTS CSV schema -#' @keywords internal -#' @noRd -extract_names <- function(lst) { - if (is.list(lst)) { - names <- lapply(lst, function(x) extract_names(x$name)) - unlist(names) - } else { - lst - } -} - -#' Convert a tibble into a matrix -#' -#' Reshapes a tibble as an m✕n matrix of m distinct radar heights and -#' n observations (sweeps) ordered by time. Each tibble contains data of a single vpts attribute. -#' @param tibble A tibble in the format: datetime, height, variable, value. -#' @return A list with two elements: the 'variable' of interest and the reshaped matrix -#' @keywords internal -#' @noRd -tibble_to_mat <- function(tibble) { - unique_heights <- unique(tibble[["height"]]) - matrix_list <- lapply(unique_heights, function(height) { - height_subset <- tibble[tibble[["height"]] == height, ] - matrix(height_subset$value, nrow = 1) - }) - matrix <- do.call(rbind, matrix_list) - return(list(variable = tibble$variable[1], matrix = matrix)) -} - -#' Convert a vpts dataframe into an ordered list of matrices -#' -#' @param data A dataframe created from a VPTS CSV file -#' @param maskvars a character vector of radar variables to be masked from the input , e.g., c("radar_latitude", "radar_longitude", ...) 
-#' @param schema a frictionless schema -#' @returns A named list of matrices ordered according to radvars -#' @keywords internal -#' @noRd -df_to_mat_list <- function(data, maskvars, schema) { - datetime <- height <- variable <- fields <- dbz_all <- DBZH <- NULL - radvars <- extract_names(schema$fields) #allow DBZH as alternative to dbz_all - radvars <- radvars[!radvars %in% maskvars] - alt_radvar <- "DBZH" - insert_index <- which(radvars == "dbz_all") + 1 - radvars <- append(radvars, alt_radvar, after = insert_index) - tbls_lst <- data %>% - dplyr::select(c(setdiff(colnames(data), maskvars), "datetime", "height")) %>% - tidyr::pivot_longer(-c(datetime, height), names_to = "variable", values_to = "value") %>% - dplyr::group_by(variable) %>% - dplyr::group_split() - - unnamed_mat_list <- lapply(tbls_lst, tibble_to_mat) - var_names <- sapply(unnamed_mat_list, function(x) x$variable) - named_mat_list <- lapply(unnamed_mat_list, `[[`, "matrix") - names(named_mat_list) <- var_names - - subset_indices <- match(var_names, radvars) - ordered_subset <- var_names[order(subset_indices)] - - ordered_mat_list <- named_mat_list[ordered_subset] - return(ordered_mat_list) -} diff --git a/R/validate_vpts.R b/R/validate_vpts.R new file mode 100644 index 000000000..4e61128a4 --- /dev/null +++ b/R/validate_vpts.R @@ -0,0 +1,98 @@ +get_field_schema <- function(field, schema) { + for (i in seq_along(schema$fields$name)) { + if (field %in% c(schema$fields$name[i], schema$fields$nameAlternatives[[i]])) { + return(schema$fields[i, ]) + } + } + return(NULL) # return NULL if no matching field found +} +validate_datetime_format <- function(data, format) { + parsed_data <- tryCatch({ + as.POSIXct(data, format = format, tz = "UTC") + }, error = function(e) NULL) + # check for failed parsing + if (any(is.na(parsed_data))) { + return(FALSE) + } + return(TRUE) +} +#' Validate dataframe against VPTS schema +#' +#' @param df The dataframe to validate. +#' @noRd +#' @return Invisibly returns a list with validation results including messages for any issues found. 
+#' @keywords internal +validate_vpts <- function(df) { + schema <- bioRad::vpts_schema + required_fields <- schema$fields$name[schema$fields$constraints.required == + TRUE] + all_fields <- schema$fields$name + df_fields <- names(df) + + # Check for missing required fields + missing_required <- setdiff(required_fields, df_fields) + if (length(missing_required) > 0) { + warning("Missing required fields: ", paste(missing_required, collapse = ", ")) + } + + issues <- list() + extra_fields <- character() + + # Validate each field in the dataframe that is also in the schema + for (field in df_fields) { + + field_schema <- get_field_schema(field, schema) + if (!is.null(field_schema)) { + + field_data <- df[[field]] + # Validate type + type_valid <- switch(as.character(field_schema$type), string = is.character(field_data), + number = is.numeric(field_data), integer = is.integer(field_data) || + (is.numeric(field_data) && all(field_data == floor(field_data))), + datetime = inherits(field_data, "POSIXct") || inherits(field_data, + "POSIXt"), boolean = is.logical(field_data), stop("Unsupported type specified in schema for field: ", + field)) + if (!type_valid) { + issues <- c(issues, glue::glue("Type validation failed for {field}")) + } + + # Validate date-time format if specified + if (field_schema$type == "datetime" && !is.na(field_schema$format)) { + if (!validate_datetime_format(field_data, field_schema$format)) { + return(glue::glue("Date-time format validation failed for {field}")) + } + } + + # Validate constraints + if (!is.null(field_schema$constraints)) { + if (!is.na(field_schema$constraints$minimum) && any(field_data < + field_schema$constraints$minimum, na.rm = TRUE)) { + return(glue::glue("Minimum value constraint violated for {field}")) + } + if (!is.na(field_schema$constraints$maximum) && any(field_data > + field_schema$constraints$maximum, na.rm = TRUE)) { + return(glue::glue("Maximum value constraint violated for {field}")) + } + if (!is.na(field_schema$constraints$pattern) && any(!stringr::str_detect(field_data, + field_schema$constraints$pattern))) { + return(glue::glue("Pattern constraint violated for {field}")) + } + } + } else { + extra_fields <- c(extra_fields, field) + } + } + + # Show extra fields + if (length(extra_fields) > 0) { + warning("Extra fields found: ", paste(extra_fields, collapse = ", ")) + } + + # Show validation issues + if (length(issues) > 0) { + warning("Validation issues found: ", paste(issues, collapse = "; ")) + } + + invisible(list(valid = TRUE, issues = issues)) +} + diff --git a/README.md b/README.md index 0d7285521..49c618213 100644 --- a/README.md +++ b/README.md @@ -23,24 +23,24 @@ calculate further summary statistics. To get started, see: -- [Dokter et al. (2019)](https://doi.org/10.1111/ecog.04028): a paper - describing the package. -- [bioRad - vignette](https://adriaandokter.com/bioRad/articles/bioRad.html): an - introduction to bioRad’s main functionalities. -- [Function - reference](https://adriaandokter.com/bioRad/reference/index.html): - an overview of all bioRad functions. -- [Introductory - exercises](https://adriaandokter.com/bioRad/articles/rad_aero_19.html): - a tutorial with code examples and exercises. +- [Dokter et al. (2019)](https://doi.org/10.1111/ecog.04028): a paper + describing the package. +- [bioRad + vignette](https://adriaandokter.com/bioRad/articles/bioRad.html): an + introduction to bioRad’s main functionalities.
+- [Function + reference](https://adriaandokter.com/bioRad/reference/index.html): an + overview of all bioRad functions. +- [Introductory + exercises](https://adriaandokter.com/bioRad/articles/rad_aero_19.html): + a tutorial with code examples and exercises. More vignettes: -- [Range - correction](https://adriaandokter.com/bioRad/articles/range_correction.html): - estimate spatial images of vertically integrated density corrected - for range effects. +- [Range + correction](https://adriaandokter.com/bioRad/articles/range_correction.html): + estimate spatial images of vertically integrated density corrected for + range effects. Documentation for the latest development version can be found [here](https://adriaandokter.com/bioRad/dev/). @@ -97,8 +97,10 @@ Then load the package with: ``` r library(bioRad) -#> Welcome to bioRad version 0.7.3 -#> using vol2birdR version 1.0.1 (MistNet installed) +#> Welcome to bioRad version 0.8.1 +#> Attempting to load MistNet from:/Users/amd427/Library/R/x86_64/4.3/library/vol2birdR/lib +#> MistNet successfully initialized. +#> using vol2birdR version 1.0.3 (MistNet installed) ``` ### (optional) Enable MistNet @@ -196,12 +198,11 @@ tutorial](https://adriaandokter.com/bioRad/articles/rad_aero_19.html). ## Meta -- We welcome - [contributions](https://adriaandokter.com/bioRad/CONTRIBUTING.html) - including bug reports. -- License: MIT -- Get citation information for `bioRad` in R doing - `citation("bioRad")`. -- Please note that this project is released with a [Contributor Code - of Conduct](https://adriaandokter.com/bioRad/CODE_OF_CONDUCT.html). - By participating in this project you agree to abide by its terms. +- We welcome + [contributions](https://adriaandokter.com/bioRad/CONTRIBUTING.html) + including bug reports. +- License: MIT +- Get citation information for `bioRad` in R doing `citation("bioRad")`. +- Please note that this project is released with a [Contributor Code of + Conduct](https://adriaandokter.com/bioRad/CODE_OF_CONDUCT.html). By + participating in this project you agree to abide by its terms. 
diff --git a/codemeta.json b/codemeta.json index 0da62e340..7c64dfa12 100644 --- a/codemeta.json +++ b/codemeta.json @@ -8,13 +8,13 @@ "codeRepository": "https://github.com/adokter/bioRad/", "issueTracker": "https://github.com/adokter/bioRad/issues", "license": "https://spdx.org/licenses/MIT", - "version": "0.7.3", + "version": "0.8.1", "programmingLanguage": { "@type": "ComputerLanguage", "name": "R", "url": "https://r-project.org" }, - "runtimePlatform": "R version 4.3.0 (2023-04-21)", + "runtimePlatform": "R version 4.4.1 (2024-06-14)", "provider": { "@id": "https://cran.r-project.org", "@type": "Organization", @@ -26,7 +26,7 @@ "@type": "Person", "givenName": "Adriaan M.", "familyName": "Dokter", - "email": "amd427@cornell.edu", + "email": "biorad@cornell.edu", "@id": "https://orcid.org/0000-0001-6573-066X" }, { @@ -126,7 +126,7 @@ "@type": "Person", "givenName": "Adriaan M.", "familyName": "Dokter", - "email": "amd427@cornell.edu", + "email": "biorad@cornell.edu", "@id": "https://orcid.org/0000-0001-6573-066X" } ], @@ -203,18 +203,6 @@ }, "sameAs": "https://CRAN.R-project.org/package=rosm" }, - { - "@type": "SoftwareApplication", - "identifier": "sf", - "name": "sf", - "provider": { - "@id": "https://cran.r-project.org", - "@type": "Organization", - "name": "Comprehensive R Archive Network (CRAN)", - "url": "https://cran.r-project.org" - }, - "sameAs": "https://CRAN.R-project.org/package=sf" - }, { "@type": "SoftwareApplication", "identifier": "testthat", @@ -228,18 +216,6 @@ }, "sameAs": "https://CRAN.R-project.org/package=testthat" }, - { - "@type": "SoftwareApplication", - "identifier": "tidyselect", - "name": "tidyselect", - "provider": { - "@id": "https://cran.r-project.org", - "@type": "Organization", - "name": "Comprehensive R Archive Network (CRAN)", - "url": "https://cran.r-project.org" - }, - "sameAs": "https://CRAN.R-project.org/package=tidyselect" - }, { "@type": "SoftwareApplication", "identifier": "vdiffr", @@ -322,18 +298,6 @@ "sameAs": "https://CRAN.R-project.org/package=fields" }, "6": { - "@type": "SoftwareApplication", - "identifier": "frictionless", - "name": "frictionless", - "provider": { - "@id": "https://cran.r-project.org", - "@type": "Organization", - "name": "Comprehensive R Archive Network (CRAN)", - "url": "https://cran.r-project.org" - }, - "sameAs": "https://CRAN.R-project.org/package=frictionless" - }, - "7": { "@type": "SoftwareApplication", "identifier": "ggplot2", "name": "ggplot2", @@ -345,7 +309,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=ggplot2" }, - "8": { + "7": { "@type": "SoftwareApplication", "identifier": "glue", "name": "glue", @@ -357,12 +321,12 @@ }, "sameAs": "https://CRAN.R-project.org/package=glue" }, - "9": { + "8": { "@type": "SoftwareApplication", "identifier": "graphics", "name": "graphics" }, - "10": { + "9": { "@type": "SoftwareApplication", "identifier": "jsonlite", "name": "jsonlite", @@ -374,7 +338,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=jsonlite" }, - "11": { + "10": { "@type": "SoftwareApplication", "identifier": "lubridate", "name": "lubridate", @@ -386,7 +350,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=lubridate" }, - "12": { + "11": { "@type": "SoftwareApplication", "identifier": "lutz", "name": "lutz", @@ -398,12 +362,12 @@ }, "sameAs": "https://CRAN.R-project.org/package=lutz" }, - "13": { + "12": { "@type": "SoftwareApplication", "identifier": "methods", "name": "methods" }, - "14": { + "13": { "@type": "SoftwareApplication", "identifier": "raster", "name": "raster", @@ -415,7 +379,7 
@@ }, "sameAs": "https://CRAN.R-project.org/package=raster" }, - "15": { + "14": { "@type": "SoftwareApplication", "identifier": "readr", "name": "readr", @@ -427,7 +391,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=readr" }, - "16": { + "15": { "@type": "SoftwareApplication", "identifier": "rhdf5", "name": "rhdf5", @@ -439,7 +403,7 @@ }, "sameAs": "https://bioconductor.org/packages/release/bioc/html/rhdf5.html" }, - "17": { + "16": { "@type": "SoftwareApplication", "identifier": "rlang", "name": "rlang", @@ -451,6 +415,18 @@ }, "sameAs": "https://CRAN.R-project.org/package=rlang" }, + "17": { + "@type": "SoftwareApplication", + "identifier": "sf", + "name": "sf", + "provider": { + "@id": "https://cran.r-project.org", + "@type": "Organization", + "name": "Comprehensive R Archive Network (CRAN)", + "url": "https://cran.r-project.org" + }, + "sameAs": "https://CRAN.R-project.org/package=sf" + }, "18": { "@type": "SoftwareApplication", "identifier": "sp", @@ -469,6 +445,18 @@ "name": "stats" }, "20": { + "@type": "SoftwareApplication", + "identifier": "stringr", + "name": "stringr", + "provider": { + "@id": "https://cran.r-project.org", + "@type": "Organization", + "name": "Comprehensive R Archive Network (CRAN)", + "url": "https://cran.r-project.org" + }, + "sameAs": "https://CRAN.R-project.org/package=stringr" + }, + "21": { "@type": "SoftwareApplication", "identifier": "suntools", "name": "suntools", @@ -480,7 +468,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=suntools" }, - "21": { + "22": { "@type": "SoftwareApplication", "identifier": "tidyr", "name": "tidyr", @@ -492,12 +480,24 @@ }, "sameAs": "https://CRAN.R-project.org/package=tidyr" }, - "22": { + "23": { + "@type": "SoftwareApplication", + "identifier": "tidyselect", + "name": "tidyselect", + "provider": { + "@id": "https://cran.r-project.org", + "@type": "Organization", + "name": "Comprehensive R Archive Network (CRAN)", + "url": "https://cran.r-project.org" + }, + "sameAs": "https://CRAN.R-project.org/package=tidyselect" + }, + "24": { "@type": "SoftwareApplication", "identifier": "utils", "name": "utils" }, - "23": { + "25": { "@type": "SoftwareApplication", "identifier": "viridis", "name": "viridis", @@ -509,7 +509,7 @@ }, "sameAs": "https://CRAN.R-project.org/package=viridis" }, - "24": { + "26": { "@type": "SoftwareApplication", "identifier": "viridisLite", "name": "viridisLite", @@ -523,7 +523,7 @@ }, "SystemRequirements": null }, - "fileSize": "4879.903KB", + "fileSize": "9111.507KB", "citation": [ { "@type": "ScholarlyArticle", diff --git a/cran-comments.md b/cran-comments.md index f488aa37c..feb293d00 100644 --- a/cran-comments.md +++ b/cran-comments.md @@ -1,8 +1,4 @@ -## bioRad 0.7.3 +## bioRad 0.8.1 -Adressing CRAN errors related to stamen maps brownouts, and resulting issues in dependency ggmap. - -Fixes: -1. Migrated from ggmap to ggspatial -2. Adressed all CRAN build issues -3. some small bugfixes +* examples in dontrun blocks now also use suggested packages conditionally +* includes a small bugfix for function as.vpts() introduced in version 0.8.0 diff --git a/data/vpts_schema.rda b/data/vpts_schema.rda new file mode 100644 index 000000000..800548d8c Binary files /dev/null and b/data/vpts_schema.rda differ diff --git a/man/apply_mistnet.Rd b/man/apply_mistnet.Rd index 0d89e220d..fa0c5bf17 100644 --- a/man/apply_mistnet.Rd +++ b/man/apply_mistnet.Rd @@ -103,6 +103,7 @@ MistNet may run more slowly on Windows than on Linux or Mac OS X. 
\examples{ \donttest{ # make sure you have installed the MistNet libraries and model, using: +if (requireNamespace("vol2birdR", quietly = TRUE)){ if(!vol2birdR::mistnet_exists()){ vol2birdR::install_mistnet() vol2birdR::install_mistnet_model() @@ -144,6 +145,7 @@ plot(ppi, param = "CELL") file.remove(tempfile) } } +} \references{ Please cite this publication when using MistNet: \itemize{ diff --git a/man/beam_distance.Rd b/man/beam_distance.Rd index 2d463693e..11ca391a3 100644 --- a/man/beam_distance.Rd +++ b/man/beam_distance.Rd @@ -41,8 +41,8 @@ beam_distance(100000, 5) Other beam_functions: \code{\link{beam_height}()}, -\code{\link{beam_profile_overlap}()}, \code{\link{beam_profile}()}, +\code{\link{beam_profile_overlap}()}, \code{\link{beam_range}()}, \code{\link{beam_width}()}, \code{\link{gaussian_beam_profile}()} diff --git a/man/beam_height.Rd b/man/beam_height.Rd index 787278a33..4a559557e 100644 --- a/man/beam_height.Rd +++ b/man/beam_height.Rd @@ -58,8 +58,8 @@ plot(range, beam_height(range, 0.5), ylab = "beam height [m]", xlab = "range [m] Other beam_functions: \code{\link{beam_distance}()}, -\code{\link{beam_profile_overlap}()}, \code{\link{beam_profile}()}, +\code{\link{beam_profile_overlap}()}, \code{\link{beam_range}()}, \code{\link{beam_width}()}, \code{\link{gaussian_beam_profile}()} diff --git a/man/beam_range.Rd b/man/beam_range.Rd index 1e45b78c6..29b23a23d 100644 --- a/man/beam_range.Rd +++ b/man/beam_range.Rd @@ -38,8 +38,8 @@ beam_range(100000, 5) Other beam_functions: \code{\link{beam_distance}()}, \code{\link{beam_height}()}, -\code{\link{beam_profile_overlap}()}, \code{\link{beam_profile}()}, +\code{\link{beam_profile_overlap}()}, \code{\link{beam_width}()}, \code{\link{gaussian_beam_profile}()} } diff --git a/man/beam_width.Rd b/man/beam_width.Rd index f21165cfc..fdbe53469 100644 --- a/man/beam_width.Rd +++ b/man/beam_width.Rd @@ -32,8 +32,8 @@ plot(range, beam_width(range), ylab = "beam width [m]", xlab = "range [m]") Other beam_functions: \code{\link{beam_distance}()}, \code{\link{beam_height}()}, -\code{\link{beam_profile_overlap}()}, \code{\link{beam_profile}()}, +\code{\link{beam_profile_overlap}()}, \code{\link{beam_range}()}, \code{\link{gaussian_beam_profile}()} } diff --git a/man/bioRad-package.Rd b/man/bioRad-package.Rd index b17ddc388..4aa304f6d 100644 --- a/man/bioRad-package.Rd +++ b/man/bioRad-package.Rd @@ -31,7 +31,7 @@ Useful links: } \author{ -\strong{Maintainer}: Adriaan M. Dokter \email{amd427@cornell.edu} (\href{https://orcid.org/0000-0001-6573-066X}{ORCID}) +\strong{Maintainer}: Adriaan M. Dokter \email{biorad@cornell.edu} (\href{https://orcid.org/0000-0001-6573-066X}{ORCID}) Authors: \itemize{ diff --git a/man/calculate_vp.Rd b/man/calculate_vp.Rd index b9609474a..392e32117 100644 --- a/man/calculate_vp.Rd +++ b/man/calculate_vp.Rd @@ -217,11 +217,12 @@ be downloaded at \url{https://s3.amazonaws.com/mistnet/mistnet_nexrad.pt}. # Locate and read the polar volume example file pvolfile_source <- system.file("extdata", "volume.h5", package = "bioRad") -# Copy the file to a home directory with read/write permissions +# Copy the file to a temporary directory with read/write permissions pvolfile <- paste0(tempdir(),"/volume.h5") file.copy(pvolfile_source, pvolfile) # Calculate the profile +if (requireNamespace("vol2birdR", quietly = TRUE)) { vp <- calculate_vp(pvolfile) # Get summary info @@ -229,6 +230,9 @@ vp # Clean up file.remove(pvolfile) + +} + } \references{ Dokter et al. 
(2011) is the main reference for the profiling algorithm diff --git a/man/composite_ppi.Rd b/man/composite_ppi.Rd index 58cb89f24..859e1be9c 100644 --- a/man/composite_ppi.Rd +++ b/man/composite_ppi.Rd @@ -121,6 +121,8 @@ ppis <- lapply(pvol$scans, project_as_ppi, grid_size=1000) composite <- composite_ppi(ppis, method = "max", res=1000) # Plot the calculated max product on the basemap +if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { map(composite) } } +} diff --git a/man/download_pvolfiles.Rd b/man/download_pvolfiles.Rd index f11ea8344..f2ad3ed63 100644 --- a/man/download_pvolfiles.Rd +++ b/man/download_pvolfiles.Rd @@ -41,6 +41,7 @@ Download a selection of polar volume (\code{pvol}) files from the } \examples{ # create temporary directory +if (requireNamespace("aws.s3", quietly = TRUE)) { temp_dir <- paste0(tempdir(),"/bioRad_tmp_files") dir.create(temp_dir) download_pvolfiles( @@ -53,3 +54,4 @@ download_pvolfiles( # Clean up unlink(temp_dir, recursive = TRUE) } +} diff --git a/man/download_vpfiles.Rd b/man/download_vpfiles.Rd index 08084b337..1dca68b91 100644 --- a/man/download_vpfiles.Rd +++ b/man/download_vpfiles.Rd @@ -53,6 +53,7 @@ download_vpfiles( } \seealso{ \itemize{ +\item \code{\link[=read_vpts]{read_vpts()}} \item \code{\link[=select_vpfiles]{select_vpfiles()}} \item \code{\link[=read_vpfiles]{read_vpfiles()}} } diff --git a/man/gaussian_beam_profile.Rd b/man/gaussian_beam_profile.Rd index edb080804..3dd28fe62 100644 --- a/man/gaussian_beam_profile.Rd +++ b/man/gaussian_beam_profile.Rd @@ -53,8 +53,8 @@ Beam profile is calculated using \link{beam_height} and \link{beam_width}. \code Other beam_functions: \code{\link{beam_distance}()}, \code{\link{beam_height}()}, -\code{\link{beam_profile_overlap}()}, \code{\link{beam_profile}()}, +\code{\link{beam_profile_overlap}()}, \code{\link{beam_range}()}, \code{\link{beam_width}()} } diff --git a/man/integrate_to_ppi.Rd b/man/integrate_to_ppi.Rd index 65aa999a8..3f5e9fa08 100644 --- a/man/integrate_to_ppi.Rd +++ b/man/integrate_to_ppi.Rd @@ -179,7 +179,9 @@ ppi <- integrate_to_ppi(pvol, example_vp, res = 2000) plot(ppi, param = "VID", zlim = c(0, 200)) # Download a basemap and map the ppi +if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { map(ppi) +} # The ppi can also be projected on a user-defined raster, as follows: diff --git a/man/list_vpts_aloft.Rd b/man/list_vpts_aloft.Rd index 7ac867657..18c16f0d4 100644 --- a/man/list_vpts_aloft.Rd +++ b/man/list_vpts_aloft.Rd @@ -39,5 +39,8 @@ List aloft urls for time series of vertical profiles (\code{vpts}) of radar stations } \examples{ -list_vpts_aloft(radars = "bejab") +\donttest{ +if (requireNamespace("aws.s3", quietly = TRUE)) { +list_vpts_aloft(radars = "bejab", date_min='2018-10-01', date_max = '2018-12-31') +}} } diff --git a/man/map.Rd b/man/map.Rd index a845e6d88..989c92334 100644 --- a/man/map.Rd +++ b/man/map.Rd @@ -88,6 +88,7 @@ model (ODIM), see Table 16 in the # Project a scan as a ppi ppi <- project_as_ppi(example_scan) \donttest{ +if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))) { # Choose a basemap basemap <- rosm::osm.types()[1] @@ -113,6 +114,7 @@ map(ppi, map = basemap, radar_size = 5, radar_color = "blue") map(ppi, map = basemap, xlim = c(12.4, 13.2), ylim = c(56, 56.5)) } } +} \seealso{ \itemize{ \item \code{\link[=project_as_ppi]{project_as_ppi()}} diff --git a/man/nexrad_to_odim.Rd b/man/nexrad_to_odim.Rd index a05fe263e..100524b1c 100644 --- 
a/man/nexrad_to_odim.Rd +++ b/man/nexrad_to_odim.Rd @@ -40,12 +40,15 @@ download.file(paste0("https://noaa-nexrad-level2.s3.amazonaws.com/", new_path = file.path(tempdir(), "KBGM_example.h5") +if (requireNamespace("vol2birdR", quietly = TRUE)) { nexrad_to_odim(path, new_path) # verify that we have generated a polar volume in ODIM HDF5 format get_odim_object_type(new_path) # clean up -file.remove(path, new_path) +file.remove(new_path) +} +file.remove(path) } } diff --git a/man/proj_to_wgs.Rd b/man/proj_to_wgs.Rd index babda2201..ab2275da4 100644 --- a/man/proj_to_wgs.Rd +++ b/man/proj_to_wgs.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/utils.R \name{proj_to_wgs} \alias{proj_to_wgs} -\title{A wrapper for \code{\link[=spTransform]{spTransform()}}. +\title{A wrapper for \code{\link[sp:spTransform]{sp::spTransform()}}. Converts projected coordinates to geographic (WGS84) coordinates.} \usage{ proj_to_wgs(x, y, proj4string) @@ -18,7 +18,7 @@ proj_to_wgs(x, y, proj4string) An object of class \code{SpatialPoints}. } \description{ -A wrapper for \code{\link[=spTransform]{spTransform()}}. +A wrapper for \code{\link[sp:spTransform]{sp::spTransform()}}. Converts projected coordinates to geographic (WGS84) coordinates. } \keyword{internal} diff --git a/man/skip_if_no_aws.s3.Rd b/man/skip_if_no_aws.s3.Rd new file mode 100644 index 000000000..adaf1c8d9 --- /dev/null +++ b/man/skip_if_no_aws.s3.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{skip_if_no_aws.s3} +\alias{skip_if_no_aws.s3} +\title{Skip test if no aws.s3} +\usage{ +skip_if_no_aws.s3() +} +\value{ +Invisibly returns TRUE if aws.s3 is available, otherwise skips the test with a message "Package aws.s3 not installed". +} +\description{ +Some functions require package aws.s3 +This helper function allows to skip a test if aws.s3 is not available +Inspired by \url{https://testthat.r-lib.org/articles/skipping.html#helpers}. +} +\keyword{internal} diff --git a/man/skip_if_no_mapping.Rd b/man/skip_if_no_mapping.Rd new file mode 100644 index 000000000..c8b37c1de --- /dev/null +++ b/man/skip_if_no_mapping.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{skip_if_no_mapping} +\alias{skip_if_no_mapping} +\title{Skip test if missing dependencies for mapping} +\usage{ +skip_if_no_mapping() +} +\value{ +Invisibly returns TRUE if dependencies available, otherwise skips the test with a message "map() dependencies (ggspatial, prettymapr, rosm) not installed". +} +\description{ +Function map depends on several spatial dependencies (ggspatial, prettymapr, rosm). +This helper function allows to skip a test if these dependencies are not available +Inspired by \url{https://testthat.r-lib.org/articles/skipping.html#helpers}. +} +\keyword{internal} diff --git a/man/skip_if_no_tidyselect.Rd b/man/skip_if_no_tidyselect.Rd new file mode 100644 index 000000000..7c3705d31 --- /dev/null +++ b/man/skip_if_no_tidyselect.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{skip_if_no_tidyselect} +\alias{skip_if_no_tidyselect} +\title{Skip test if no tidyselect} +\usage{ +skip_if_no_tidyselect() +} +\value{ +Invisibly returns TRUE if tidyselect is available, otherwise skips the test with a message "Package tidyselect not installed". 
+} +\description{ +dplyr select method require package tidyselect +This helper function allows to skip a test if tidyselect is not available +Inspired by \url{https://testthat.r-lib.org/articles/skipping.html#helpers}. +} +\keyword{internal} diff --git a/man/tidyverse.Rd b/man/tidyverse.Rd index 6434c74b5..db672d2b1 100644 --- a/man/tidyverse.Rd +++ b/man/tidyverse.Rd @@ -5,7 +5,7 @@ \alias{select.scan} \title{Tidyverse methods for bioRad objects} \usage{ -\method{select}{scan}(.data, ...) +select.scan(.data, ...) } \arguments{ \item{.data}{data object of class \code{scan} or \code{pvol}} @@ -29,3 +29,4 @@ get_scan(pvol_selected, 1.5) select(get_scan(example_pvol, 2.5), VRADH:ZDR) } } +\keyword{internal} diff --git a/man/vpts_schema.Rd b/man/vpts_schema.Rd new file mode 100644 index 000000000..e92da542b --- /dev/null +++ b/man/vpts_schema.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/data.R +\docType{data} +\name{vpts_schema} +\alias{vpts_schema} +\title{ENRAM-defined VPTS schema} +\format{ +An object of class \code{list} of length 3. +} +\usage{ +vpts_schema +} +\value{ +A list object containing the data model for the standard VPTS exchange format. +} +\description{ +A dataset containing the schema definitions for VPTS data validation. +} +\examples{ +# Reload vpts_schema from package (e.g. in case it was altered) +data(vpts_schema) +} +\keyword{internal} diff --git a/man/wgs_to_proj.Rd b/man/wgs_to_proj.Rd index 37272405e..4e33efeeb 100644 --- a/man/wgs_to_proj.Rd +++ b/man/wgs_to_proj.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/utils.R \name{wgs_to_proj} \alias{wgs_to_proj} -\title{A wrapper for \code{\link[=spTransform]{spTransform()}}. +\title{A wrapper for \code{\link[sp:spTransform]{sp::spTransform()}}. Converts geographic (WGS84) coordinates to a specified projection} \usage{ wgs_to_proj(lon, lat, proj4string) @@ -18,7 +18,7 @@ wgs_to_proj(lon, lat, proj4string) An object of class \code{SpatialPoints}. } \description{ -A wrapper for \code{\link[=spTransform]{spTransform()}}. +A wrapper for \code{\link[sp:spTransform]{sp::spTransform()}}. 
Converts geographic (WGS84) coordinates to a specified projection } \keyword{internal} diff --git a/tests/testthat/test-as.vpts.R b/tests/testthat/test-as.vpts.R index 51f2bc762..1c41d083e 100644 --- a/tests/testthat/test-as.vpts.R +++ b/tests/testthat/test-as.vpts.R @@ -8,21 +8,37 @@ test_that("as.vpts() returns error message for incorrect data", { expect_error(as.vpts(df),"identical") }) +test_that("as.vpts() handles multiple unique attribute values correctly", { -test_that("as.vpts() converts reflectivity `dbz_all` into 'DBZH'", { + original_df <- read.csv(system.file("extdata", "example_vpts.csv", package = "bioRad")) - file <- system.file("extdata", "example_vpts.csv", package = "bioRad") + if (nrow(original_df) > 1) { + df <- original_df + df$radar_longitude[2] <- df$radar_longitude[1] + 0.1 # Change longitude slightly + expected_warning <- "multiple radar_longitude values found" + expect_warning(as.vpts(df), regexp=expected_warning) + } - # When as.vpts() is called via read_vpts(), the reflectivity variable is named dbz_all in the resulting data.frame + if (nrow(original_df) > 1) { + df <- original_df + df$radar_latitude[2] <- df$radar_latitude[1] + 0.1 # Change latitude slightly + expected_warning <- "multiple radar_latitude values found" + expect_warning(as.vpts(df), regexp=expected_warning) + } - vpts_df <- read_vpts(file, data_frame=TRUE) - expect_true(!"DBZH" %in% colnames(vpts_df)) - expect_true("dbz_all" %in% colnames(vpts_df)) + if (nrow(original_df) > 1) { + df <- original_df + df$rcs[2] <- df$rcs[1] * 1.1 # Change rcs slightly + expected_warning <- "multiple rcs values found" + expect_warning(as.vpts(df), regexp=expected_warning) + } - # When as.vpts() is called on a dataframe, the reflectivity variable will be renamed DBZH in the resulting vpts object - vpts_obj <- as.vpts(vpts_df) - expect_true("DBZH" %in% names(vpts_obj$data)) - expect_true(!"dbz_all" %in% names(vpts_obj$data)) + if (nrow(original_df) > 1) { + df <- original_df + df$sd_vvp_threshold[2] <- df$sd_vvp_threshold[1] + 0.1 # Change sd_vvp_threshold slightly + expected_warning <- "multiple sd_vvp_threshold values found" + expect_warning(as.vpts(df), regexp=expected_warning) + } }) # Test that the function issues a correct warning for multiple radar_longitude values @@ -31,8 +47,8 @@ test_that("Warning is issued for multiple radar_longitude values", { vpts_df <- read_vpts(file, data_frame=TRUE) vpts_df$radar_longitude[1] <- vpts_df$radar_longitude[1] + 0.1 expect_warning( - modified_df <- as.vpts(vpts_df), - "multiple `radar_longitude` values found" + as.vpts(vpts_df), + regexp="multiple radar_longitude values found" ) }) @@ -42,9 +58,9 @@ test_that("values are set to the first for multi-value attributes", { vpts_df <- read_vpts(file, data_frame=TRUE) vpts_df$radar_longitude[1] <- vpts_df$radar_longitude[1] + 0.1 expect_warning( - vpts_obj <- as.vpts(vpts_df), - "multiple `radar_longitude` values found" + as.vpts(vpts_df), + regexp="multiple radar_longitude values found" ) - expect_equal(vpts_obj$attributes$where$lon, vpts_df$radar_longitude[1]) + #expect_equal(vpts_obj$attributes$where$lon, vpts_df$radar_longitude[1]) }) diff --git a/tests/testthat/test-download_pvolfiles.R b/tests/testthat/test-download_pvolfiles.R index 095a46d92..5e6875ebf 100644 --- a/tests/testthat/test-download_pvolfiles.R +++ b/tests/testthat/test-download_pvolfiles.R @@ -7,6 +7,7 @@ overwrite <- TRUE test_that("date input for download_pvolfiles() ", { skip_if_offline() + skip_if_no_aws.s3() # working with default expect_no_error(
suppressMessages( @@ -108,6 +109,7 @@ test_that("date input for download_pvolfiles() ", { test_that("Check radar code for download_pvolfiles() ", { skip_if_offline() + skip_if_no_aws.s3() expect_error( download_pvolfiles(date_min, date_max, @@ -132,6 +134,7 @@ test_that("Check radar code for download_pvolfiles() ", { test_that("Check path and overwrite for download_pvolfiles() ", { skip_if_offline() + skip_if_no_aws.s3() expect_error( download_pvolfiles(date_min, date_max, radars, 1, overwrite), "path is not a string (a length one character vector)", diff --git a/tests/testthat/test-list_vpts_aloft.R b/tests/testthat/test-list_vpts_aloft.R index 6ae77701c..a1d530f75 100644 --- a/tests/testthat/test-list_vpts_aloft.R +++ b/tests/testthat/test-list_vpts_aloft.R @@ -1,4 +1,6 @@ test_that("list_vpts_aloft() returns error for unknown source", { + skip_if_no_aws.s3() + skip_if_offline() expect_error( list_vpts_aloft( date_min = "2000-01-01", @@ -12,6 +14,8 @@ test_that("list_vpts_aloft() returns error for unknown source", { }) test_that("list_vpts_aloft() returns error for invalid format", { + skip_if_no_aws.s3() + skip_if_offline() expect_error( list_vpts_aloft( date_min = "2000-01-01", @@ -25,6 +29,7 @@ test_that("list_vpts_aloft() returns error for invalid format", { }) test_that("list_vpts_aloft() returns error if radar doesn't exist", { + skip_if_no_aws.s3() skip_if_offline() expect_error( list_vpts_aloft( @@ -38,6 +43,7 @@ test_that("list_vpts_aloft() returns error if radar doesn't exist", { }) test_that("list_vpts_aloft() returns a character vector", { + skip_if_no_aws.s3() skip_if_offline() expect_type( list_vpts_aloft( @@ -50,6 +56,7 @@ test_that("list_vpts_aloft() returns a character vector", { }) test_that("list_vpts_aloft() returns no warning when all dates are specified", { + skip_if_no_aws.s3() skip_if_offline() expect_no_warning( list_vpts_aloft( @@ -61,6 +68,7 @@ test_that("list_vpts_aloft() returns no warning when all dates are specified", { }) test_that("list_vpts_aloft() works without specifying dates", { + skip_if_no_aws.s3() skip_if_offline() # just date_min expect_no_error( @@ -85,6 +93,7 @@ test_that("list_vpts_aloft() works without specifying dates", { }) test_that("list_vpts_aloft() returns all data when no dates are provided", { + skip_if_no_aws.s3() skip_if_offline() expect_gt( length( @@ -104,6 +113,7 @@ test_that("list_vpts_aloft() returns all data when no dates are provided", { test_that("list_vpts_aloft() warns if data was found for subset of radars or if not all dates were found", { + skip_if_no_aws.s3() skip_if_offline() expect_warning( list_vpts_aloft( @@ -124,6 +134,7 @@ test_that("list_vpts_aloft() warns if data was found for subset of radars or if }) test_that("list_vpts_aloft() warns and returns emtpy vector on no data found",{ + skip_if_no_aws.s3() skip_if_offline() expect_equal( list_vpts_aloft( @@ -145,6 +156,7 @@ test_that("list_vpts_aloft() warns and returns emtpy vector on no data found",{ }) test_that("list_vpts_aloft() silences warnings with show_warnings argument", { + skip_if_no_aws.s3() skip_if_offline() expect_no_warning( list_vpts_aloft( diff --git a/tests/testthat/test-map.R b/tests/testthat/test-map.R index caea350bd..dd08ff994 100644 --- a/tests/testthat/test-map.R +++ b/tests/testthat/test-map.R @@ -1,4 +1,5 @@ test_that("map() returns error on incorrect parameters", { + skip_if_no_mapping() skip_if_offline() ppi <- project_as_ppi(example_scan) basemap <- rosm::osm.types()[1] @@ -40,5 +41,4 @@ test_that("map() returns error on incorrect 
parameters", { regexp = "palette should be a character vector with hex color values", fixed = TRUE ) - }) diff --git a/tests/testthat/test-read_vpts.R b/tests/testthat/test-read_vpts.R index df15d766e..c6805e543 100644 --- a/tests/testthat/test-read_vpts.R +++ b/tests/testthat/test-read_vpts.R @@ -167,18 +167,31 @@ test_that("read_vpts() returns error on multiple radars in VPTS CSV files", { ) }) +test_that("VPTS CSV column dbz_all is renamed to DBZH", { + vptsfile <- system.file("extdata", "example_vpts.csv", package = "bioRad") + expect_in("dbz_all",names(read.csv(vptsfile))) + my_vpts <- read_vpts(vptsfile) + expect_in("DBZH",names(my_vpts$data)) +}) + +test_that("gap column is logical", { + vptsfile <- system.file("extdata", "example_vpts.csv", package = "bioRad") + my_vpts <- read_vpts(vptsfile) + expect_true(is.logical(my_vpts$data$gap)) +}) test_that("check ability to convert a vpts object into a data.frame, and then cast it back into a vpts", { vptsfile <- system.file("extdata", "example_vpts.csv", package = "bioRad") my_vpts <- read_vpts(vptsfile) - res <- as.vpts(as.data.frame(my_vpts)) + res <- as.vpts(as.data.frame(my_vpts, suntime=FALSE)) expect_true(is.vpts(res)) }) -# Comapre read_vpts output from data in both formats +# Compare read_vpts output from data in both formats test_that("read_vpts() returns equal summaries from h5 and csv files from 1 day of data", { skip_if_offline() + skip_if_no_aws.s3() # clear directories file.remove(list.files(h5_dir, full.names = TRUE)) diff --git a/tests/testthat/test-select.R b/tests/testthat/test-select.R index b5b0c9128..feed8c661 100644 --- a/tests/testthat/test-select.R +++ b/tests/testthat/test-select.R @@ -1,4 +1,5 @@ test_that("select on scans", { + skip_if_no_tidyselect() data(example_scan) expect_equal(names(dplyr::select(example_scan, ZDR)$params), "ZDR") expect_equal(names(dplyr::select(example_scan, "ZDR")$params), "ZDR") @@ -6,6 +7,7 @@ expect_equal(names(dplyr::select(example_scan, starts_with("X"))$params), character()) }) test_that("select on pvols", { + skip_if_no_tidyselect() pvolfile <- system.file("extdata", "volume.h5", package = "bioRad") example_pvol <- read_pvolfile(pvolfile) expect_equal(names(dplyr::select(example_pvol, ZDR)$scans[[2]]$params), "ZDR") diff --git a/vignettes/range_correction.Rmd b/vignettes/range_correction.Rmd index 01130134b..5625dcc08 100644 --- a/vignettes/range_correction.Rmd +++ b/vignettes/range_correction.Rmd @@ -146,7 +146,9 @@ Or plot the vertically integrated density on a map: ```{r} bm <- "osm" -map(my_corrected_ppi, map=bm, param = "VIR", alpha = .5) +if (all(sapply(c("ggspatial","prettymapr", "rosm"), requireNamespace, quietly = TRUE))){ + map(my_corrected_ppi, map=bm, param = "VIR", alpha = .5) +} ``` ## 6 Overlap between radiation profile and bird profile