From da62f5afdd3d36c6f171bc6b8f34b4aac7d12ad8 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:24:01 -0400 Subject: [PATCH 01/23] Update loc_check to support custom coords + elev_col - Add coords and elev_col arguments to loc_check to support custom coordinate values or elevation column values - Add loc_length helper - Also improve support for sfg input objects - Correct outdated function title --- R/internal.R | 102 +++++++++++++++++++++++++---------------------- man/loc_check.Rd | 13 +++--- 2 files changed, 63 insertions(+), 52 deletions(-) diff --git a/R/internal.R b/R/internal.R index 0ec0391..8ae23fa 100644 --- a/R/internal.R +++ b/R/internal.R @@ -35,67 +35,75 @@ get_tilexy <- function(bbx,z){ return(expand.grid(x_all,y_all)) } +#' Get length of vector or nrow for data frame input +#' @noRd +loc_length <- function(locations) { + if (is.data.frame(locations)) { + return(nrow(locations)) + } + + length(locations) +} -#' function to check input type and projection. All input types convert to a -#' SpatialPointsDataFrame for point elevation and bbx for raster. +#' function to check and prepare input locations +#' +#' All input types convert to a sf data frame. +#' @inheritParams sf::st_as_sf +#' @param elev_col Elevation column name. #' @keywords internal -loc_check <- function(locations, prj = NULL){ - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - - if(all(class(locations)=="data.frame")){ - if(is.null(prj) & !any(class(locations) %in% c("sf", "sfc", "sfg"))){ - stop("Please supply a valid sf crs via locations or prj.") +loc_check <- function(locations, + prj = NULL, + coords = c("x", "y"), + elev_col = "elevation") { + if (is.data.frame(locations) && !inherits(locations, "sf")) { + if (is.null(prj)) { + stop("Please supply a valid crs via locations or prj.") } - - locations <- sf::st_as_sf(x = locations, coords = c("x", "y"), crs = prj) - locations$elevation <- vector("numeric", nfeature) - - } else if(any(class(locations) %in% c("sf", "sfc", "sfg"))){ - + + locations <- sf::st_as_sf(x = locations, coords = coords, crs = prj) + } else if (inherits(locations, c("sf", "sfc", "sfg"))) { sf_crs <- sf::st_crs(locations) - locations$elevation <- vector("numeric", nfeature) - - if((is.null(sf_crs) | is.na(sf_crs)) & is.null(prj)){ - stop("Please supply an sf object with a valid crs.") + if ((is.null(sf_crs) || is.na(sf_crs)) && is.null(prj)) { + stop("Please supply an sf or sfc object with a valid crs.") } - - } else if(any(class(locations) %in% c("SpatRaster", "SpatVector"))){ - + + if (inherits(locations, "sfg")) { + locations <- sf::st_sfc(locations, crs = prj) + } + + if (!inherits(locations, "sf")) { + locations <- sf::st_as_sf(locations) + } + + } else if (any(class(locations) %in% c("SpatRaster", "SpatVector"))) { sf_crs <- sf::st_crs(locations) - locations <- sf::st_as_sf(terra::as.points(locations, values = FALSE), - coords = terra::crds(locations, df = TRUE), - crs = sf_crs) - locations$elevation <- vector("numeric", nrow(locations)) - if((is.null(sf_crs) | is.na(sf_crs)) & is.null(prj)){ - stop("Please supply a valid sf crs via locations or prj.") + locations <- sf::st_as_sf( + terra::as.points(locations, values = FALSE), + coords = terra::crds(locations, df = TRUE), + crs = sf_crs + ) + + if ((is.null(sf_crs) || is.na(sf_crs)) && is.null(prj)) { + stop("Please supply a valid crs via locations or prj.") } } - #check for long>180 - if(is.null(prj)){ - prj_test <- sf::st_crs(locations) + nfeature <- 
loc_length(locations) + locations[[elev_col]] <- vector("numeric", nfeature) + + # check for long>180 + if (!is.null(prj)) { + lll <- sf::st_is_longlat(prj) } else { - prj_test <- prj + lll <- sf::st_is_longlat(locations) } - - lll <- sf::st_is_longlat(prj_test) - - if(lll){ - if(any(sf::st_coordinates(locations)[,1]>180)){ - stop("The elevatr package requires longitude in a range from -180 to 180.") - } + if (lll && any(sf::st_coordinates(locations)[, 1] > 180)) { + stop("The elevatr package requires longitude in a range from -180 to 180.") } - -locations -} - + locations +} #' function to project bounding box and if needed expand it #' @keywords internal diff --git a/man/loc_check.Rd b/man/loc_check.Rd index 1146d27..e0e1c00 100644 --- a/man/loc_check.Rd +++ b/man/loc_check.Rd @@ -2,13 +2,16 @@ % Please edit documentation in R/internal.R \name{loc_check} \alias{loc_check} -\title{function to check input type and projection. All input types convert to a -SpatialPointsDataFrame for point elevation and bbx for raster.} +\title{function to check and prepare input locations} \usage{ -loc_check(locations, prj = NULL) +loc_check(locations, prj = NULL, coords = c("x", "y"), elev_col = "elevation") +} +\arguments{ +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + +\item{elev_col}{Elevation column name.} } \description{ -function to check input type and projection. All input types convert to a -SpatialPointsDataFrame for point elevation and bbx for raster. +All input types convert to a sf data frame. } \keyword{internal} From 7b93d43b13019ab3c2f8147a15352a800d80cd8c Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:25:44 -0400 Subject: [PATCH 02/23] Update loc_check test to match modified messages --- tests/testthat/test-internal.R | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/testthat/test-internal.R b/tests/testthat/test-internal.R index 3d12fe8..a8691b8 100644 --- a/tests/testthat/test-internal.R +++ b/tests/testthat/test-internal.R @@ -53,10 +53,10 @@ test_that("proj_expand works",{ test_that("loc_check errors correctly", { empty_rast <- rast(nrow = 1, ncol =1) - expect_error(get_elev_point(locations = pt_df), - "Please supply a valid sf crs via locations or prj.") + expect_error(get_elev_point(locations = pt_df), + "Please supply a valid crs via locations or prj.") expect_error(get_elev_point(locations = rast_na), - "Please supply a valid sf crs via locations or prj.") + "Please supply a valid crs via locations or prj.") expect_error(get_elev_point(locations = sf_sm_na), "Please supply an sf object with a valid crs.") }) From 32a351ba7e231f1492d40a132abc3adfcd9b6d09 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:32:28 -0400 Subject: [PATCH 03/23] Add support for custom coords + elev_col - Add support for custom coords value to `get_elev_raster()` - Add support to `get_elev_point()` for custom elevation column and elevation units column name - Add support for custom elevation column value to `get_epqs()` and `get_aws_points()` - Add support for custom units values beyond meters and feet - Remove trailing spaces (automatic in RStudio - hope it doesn't clutter the commit too badly!) 
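- Sketch of the intended usage with the new arguments (the data frame and
  column names below are illustrative only, not part of the package API):

    pts <- data.frame(lon = c(-71.3036, -72.8145),
                      lat = c(44.2700, 44.5438))
    get_elev_point(pts, prj = 4326, coords = c("lon", "lat"),
                   elev_col = "elev", elev_units_col = "elev_unit")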
--- R/get_elev_point.R | 411 ++++++++++++++++++++++---------------------- R/get_elev_raster.R | 380 ++++++++++++++++++++-------------------- 2 files changed, 396 insertions(+), 395 deletions(-) diff --git a/R/get_elev_point.R b/R/get_elev_point.R index 3e6da20..009f05b 100644 --- a/R/get_elev_point.R +++ b/R/get_elev_point.R @@ -1,40 +1,40 @@ #' Get Point Elevation -#' -#' This function provides access to point elevations using either the USGS -#' Elevation Point Query Service (US Only) or by extracting point elevations -#' from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x -#' (long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as -#' input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with -#' elevation and elevation units as an added \code{data.frame}. -#' -#' -#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the -#' first column and y (e.g. latitude) as the second column, a -#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. -#' Elevation for these points will be returned in the +#' +#' This function provides access to point elevations using either the USGS +#' Elevation Point Query Service (US Only) or by extracting point elevations +#' from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x +#' (long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as +#' input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with +#' elevation and elevation units as an added \code{data.frame}. +#' +#' +#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the +#' first column and y (e.g. latitude) as the second column, a +#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. +#' Elevation for these points will be returned in the #' originally supplied class. -#' @param prj A valid input to \code{\link{st_crs}}. This +#' @param prj A valid input to \code{\link{st_crs}}. This #' argument is required for a \code{data.frame} of locations and optional #' for \code{sf} locations. -#' @param src A character indicating which API to use, either "epqs" or "aws" -#' accepted. The "epqs" source is relatively slow for larger numbers -#' of points (e.g. > 500). The "aws" source may be quicker in these -#' cases provided the points are in a similar geographic area. The +#' @param src A character indicating which API to use, either "epqs" or "aws" +#' accepted. The "epqs" source is relatively slow for larger numbers +#' of points (e.g. > 500). The "aws" source may be quicker in these +#' cases provided the points are in a similar geographic area. The #' "aws" source downloads a DEM using \code{get_elev_raster} and then -#' extracts the elevation for each point. -#' @param overwrite A logical indicating that existing \code{elevation} and -#' \code{elev_units} columns should be overwritten. Default is -#' FALSE and \code{get_elev_point} will error if these columns +#' extracts the elevation for each point. +#' @param overwrite A logical indicating that existing \code{elevation} and +#' \code{elev_units} columns should be overwritten. Default is +#' FALSE and \code{get_elev_point} will error if these columns #' already exist. -#' @param ... Additional arguments passed to get_epqs or get_aws_points. When -#' using "aws" as the source, pay attention to the `z` argument. 
A -#' defualt of 5 is used, but this uses a raster with a large ~4-5 km -#' pixel. Additionally, the source data changes as zoom levels -#' increase. -#' Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} -#' for details. -#' @return Function returns an \code{sf} object in the projection specified by +#' @param ... Additional arguments passed to get_epqs or get_aws_points. When +#' using "aws" as the source, pay attention to the `z` argument. A +#' default of 5 is used, but this uses a raster with a large ~4-5 km +#' pixel. Additionally, the source data changes as zoom levels +#' increase. +#' Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +#' for details. +#' @return Function returns an \code{sf} object in the projection specified by #' the \code{prj} argument. #' @export #' @examples @@ -42,9 +42,9 @@ #' library(elevatr) #' library(sf) #' library(terra) -#' -#' mts <- data.frame(x = c(-71.3036, -72.8145), -#' y = c(44.2700, 44.5438), +#' +#' mts <- data.frame(x = c(-71.3036, -72.8145), +#' y = c(44.2700, 44.5438), #' names = c("Mt. Washington", "Mt. Mansfield")) #' ll_prj <- 4326 #' mts_sf <- st_as_sf(x = mts, coords = c("x", "y"), crs = ll_prj) @@ -52,124 +52,155 @@ #' mts_raster <- rast(mts_sf, nrow = 5, ncol = 5) #' # Raster with cells for each location #' mts_raster_loc <- terra::rasterize(mts_sf, rast(mts_sf, nrow = 10, ncol = 10)) -#' +#' #' get_elev_point(locations = mts, prj = ll_prj) #' get_elev_point(locations = mts, units="feet", prj = ll_prj) #' get_elev_point(locations = mts_sf) #' get_elev_point(locations = mts_raster) #' get_elev_point(locations = mts_raster_loc) -#' -#' +#' +#' #' # Code to split into a loop and grab point at a time. 
-#' # This is usually faster for points that are spread apart -#' +#' # This is usually faster for points that are spread apart +#' #' library(dplyr) -#' +#' #' elev <- vector("numeric", length = nrow(mts)) #' for(i in seq_along(mts)){ -#' elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", +#' elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", #' z = 10)$elevation} #' mts_elev <- cbind(mts, elev) #' mts_elev #' } #' -get_elev_point <- function(locations, prj = NULL, src = c("epqs", "aws"), - overwrite = FALSE, ...){ - +get_elev_point <- function(locations, + prj = NULL, + src = c("epqs", "aws"), + coords = c("x", "y"), + overwrite = FALSE, + ..., + units = c("meters", "feet"), + elev_col = "elevation", + elev_units_col = "elev_units") { # First Check for internet if(!curl::has_internet()) { message("Please connect to the internet and try again.") return(NULL) } - + src <- match.arg(src) - + # Check for existing elevation/elev_units columns and overwrite or error - if(!overwrite & any(names(locations) %in% c("elevation", "elev_units"))){ - stop(paste0("The elevation and elev_units columns already exist.\n", + if (!overwrite && any(names(locations) %in% c(elev_col, elev_units_col))) { + stop(paste0("The elevation and elev_units columns already exist.\n", " To replace these columns set the overwrite argument to TRUE.")) } - - locations <- loc_check(locations,prj) - - if(is.null(prj)){ + + locations <- loc_check( + locations, + prj = prj, + coords = coords + ) + + if (is.null(prj)) { prj <- sf::st_crs(locations) } - + + if (all(units %in% c("meters", "feet"))) { + units <- match.arg(units) + convert_units <- units + } else { + convert_units <- units + units <- "meters" + } + # Pass of reprojected to epqs or aws to get data as spatialpointsdataframe - if (src == "epqs"){ - locations_prj <- get_epqs(locations, ...) - units <- locations_prj[[2]] + if (src == "epqs") { + locations_prj <- get_epqs( + locations, + elev_col = elev_col, + units = units, + ... + ) + locations_prj <- locations_prj[[1]] - } - - if(src == "aws"){ - locations_prj <- get_aws_points(locations, verbose = FALSE, ...) - units <- locations_prj[[2]] + } + + if(src == "aws") { + locations_prj <- get_aws_points( + locations, + verbose = FALSE, + elev_col = elev_col, + units = units, + ... 
+ ) + locations_prj <- locations_prj[[1]] } # Re-project back to original, add in units, and return - locations <- sf::st_transform(sf::st_as_sf(locations_prj), - sf::st_crs(locations)) - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - - unit_column_name <- "elev_units" - - if(any(names(list(...)) %in% "units")){ - if(list(...)$units == "feet"){ - locations[[unit_column_name]] <- rep("feet", nfeature) - } else { - locations[[unit_column_name]] <- rep("meters", nfeature) - } - } else { - locations[[unit_column_name]] <- rep("meters", nfeature) - } - - if(src == "aws") { - message(paste("Note: Elevation units are in", units)) - } else { - message(paste("Note: Elevation units are in", - tolower(strsplit(units, "=")[[1]][2]))) + locations <- sf::st_transform( + sf::st_as_sf(locations_prj), + crs = prj + ) + + nfeature <- loc_length(locations) + + if (convert_units != units) { + print("here") + mode <- "standard" + elev_units_label <- convert_units + + # TODO: Implement handling for non-character units values + # if (!is.character(convert_units)) { + # elev_units_label <- unique(as.character(base::units(convert_units)[["numerator"]])) + # mode <- units::units_options("set_units_mode") + # } + stopifnot(is.character(convert_units) && (length(convert_units) == 1)) + + elevation <- units::set_units(locations[[elev_col]], value = units, mode = mode) + + locations[[elev_col]] <- as.numeric( + units::set_units(elevation, value = convert_units, mode = mode) + ) + + units <- convert_units } - locations + + locations[[elev_units_col]] <- rep(units, nfeature) + + message(paste("Note: Elevation units are in", units)) + + relocate_sf_col_end(locations) } #' Get point elevation data from the USGS Elevation Point Query Service -#' +#' #' Function for accessing elevation data from the USGS epqs -#' -#' @param locations A SpatialPointsDataFrame of the location(s) for which you -#' wish to return elevation. The first column is Longitude and -#' the second column is Latitude. -#' @param units Character string of either meters or feet. Conversions for +#' +#' @param locations A SpatialPointsDataFrame of the location(s) for which you +#' wish to return elevation. The first column is Longitude and +#' the second column is Latitude. +#' @param units Character string of either meters or feet. Conversions for #' 'epqs' are handled by the API itself. #' @param ncpu Number of CPU's to use when downloading epqs data. -#' @param serial Logical to determine if API should be hit in serial or in +#' @param serial Logical to determine if API should be hit in serial or in #' parallel. TRUE will use purrr, FALSE will use furrr. 
-#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with #' elevation added to the data slot and a character of the elevation units #' @export #' @importFrom progressr handlers progressor with_progress #' @importFrom purrr map_dbl #' @keywords internal -get_epqs <- function(locations, units = c("meters","feet"), +get_epqs <- function(locations, + units = c("meters", "feet"), ncpu = future::availableCores() - 1, - serial = NULL){ - + serial = NULL, + elev_col = "elevation") { + ll_prj <- "EPSG:4326" - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - + nfeature <- loc_length(locations) + if(is.null(serial)){ if(nfeature < 25){ serial <- TRUE @@ -177,90 +208,91 @@ get_epqs <- function(locations, units = c("meters","feet"), serial <- FALSE } } - + base_url <- "https://epqs.nationalmap.gov/v1/json?" - - if(match.arg(units) == "meters"){ - units <- "Meters" - } else if(match.arg(units) == "feet"){ - units <- "Feet" - } - + + units <- match.arg(units) + + units <- switch(units, + meters = "Meters", + feet = "Feet" + ) + locations <- sf::st_transform(locations, sf::st_crs(ll_prj)) units <- paste0("&units=",units) - + get_epqs_resp <- function(coords, base_url, units, progress = FALSE) { - + Sys.sleep(0.001) #Getting non-repeateable errors maybe too many hits... x <- coords[1] y <- coords[2] - + loc <- paste0("x=",x, "&y=", y) url <- paste0(base_url,loc,units,"&output=json") - + resp <- tryCatch(httr::GET(url), error = function(e) e) n<-1 - + while(n <= 5 & any(class(resp) == "simpleError")) { # Hit it again to test as most times this is a unexplained timeout that # Corrects on next hit - + resp <- tryCatch(httr::GET(url), error = function(e) e) n <- n + 1 } - + if(n > 5 & any(class(resp) == "simpleError")) { - message(paste0("API returned:'", resp$message, - "'. NA returned for elevation"), + message(paste0("API returned:'", resp$message, + "'. NA returned for elevation"), call. = FALSE) return(NA) } - - if(httr::status_code(resp) == 200 & - httr::content(resp, "text", encoding = "UTF-8") == + + if(httr::status_code(resp) == 200 & + httr::content(resp, "text", encoding = "UTF-8") == "Invalid or missing input parameters."){ message("API returned an empty repsonse (e.g. location in ocean or not in U.S.). NA returned for elevation") return(NA) } else if(httr::status_code(resp) == 200){ - - resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), + + resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), simplifyVector = FALSE), error = function(e) e) n<-1 while(n <= 5 & any(class(resp) == "simpleError")) { # Hit it again. Getting hard to repeat API errors that usually self correct... - - resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), + + resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), simplifyVector = FALSE), error = function(e) e) n <- n + 1 } - + if(n >= 5 & any(class(resp) == "simpleError")) { message("API error, NA returned for elevation") return(NA) } - + } else { - message(paste0("API returned a status code:'", resp$status_code, - "'. NA returned for elevation"), + message(paste0("API returned a status code:'", resp$status_code, + "'. NA returned for elevation"), call. 
= FALSE) return(NA) } round(as.numeric(resp$value), 2) } - - coords_df <- split(data.frame(sf::st_coordinates(locations)), - seq_along(locations$elevation)) - + + coords_df <- split(data.frame(sf::st_coordinates(locations)), + seq_along(locations[["elevation"]])) + #elev_column_name <- make.unique(c(names(locations), "elevation")) #elev_column_name <- elev_column_name[!elev_column_name %in% names(locations)] - elev_column_name <- "elevation" - + elev_column_name <- elev_col + message("Downloading point elevations:") - + progressr::handlers( progressr::handler_progress( format = " Accessing point elevations [:bar] :percent", - clear = FALSE, + clear = FALSE, width= 60 )) #browser() @@ -273,99 +305,66 @@ get_epqs <- function(locations, units = c("meters","feet"), get_epqs_resp(x, base_url, units) }) } else { - + future::plan(future::multisession, workers = ncpu) p <- progressor(along = coords_df) - locations[[elev_column_name]] <-furrr::future_map_dbl(coords_df, + locations[[elev_column_name]] <- furrr::future_map_dbl(coords_df, function(x) { p() - get_epqs_resp(x, base_url, + get_epqs_resp(x, base_url, units)}) - + } }) - + # For areas without epqs values that return -1000000, switch to NA - locations$elevation[locations[[elev_column_name]] == -1000000] <- NA + locations[[elev_col]][locations[[elev_column_name]] == -1000000] <- NA location_list <- list(locations, units) if(serial==FALSE){future::plan(future::sequential)} location_list } #' Get point elevation data from the AWS Terrain Tiles -#' -#' Function for accessing elevation data from AWS and extracting the elevations -#' -#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the -#' first column and y (e.g. latitude) as the second column, a -#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. -#' Elevation for these points will be returned in the +#' +#' Function for accessing elevation data from AWS and extracting the elevations +#' +#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the +#' first column and y (e.g. latitude) as the second column, a +#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. +#' Elevation for these points will be returned in the #' originally supplied class. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at -#' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. -#' default value is 5 is supplied. -#' @param units Character string of either meters or feet. Conversions for -#' 'aws' are handled in R as the AWS terrain tiles are served in +#' of the resultant raster is determined by the zoom and latitude. For +#' details on zoom and resolution see the documentation from Mapzen at +#' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. +#' default value is 5 is supplied. +#' @param units Character string of either meters or feet. Conversions for +#' 'aws' are handled in R as the AWS terrain tiles are served in #' meters. -#' @param verbose Report back messages. +#' @param verbose Report back messages. #' @param ... 
Arguments to be passed to \code{get_elev_raster} -#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with #' elevation added to the data slot and a character of the elevation units #' @export #' @keywords internal -get_aws_points <- function(locations, z = 5, units = c("meters", "feet"), - verbose = TRUE, ...){ +get_aws_points <- function(locations, + z = 5, + units = c("meters", "feet"), + verbose = TRUE, + elev_col = "elevation", + ...) { units <- match.arg(units) dem <- get_elev_raster(locations, z, verbose = verbose, ...) dem <- methods::as(dem, "SpatRaster") elevation <- units::set_units(terra::extract(dem, locations)[,2], "m") - if(units == "feet"){ + if (units == "feet"){ elevation <- as.numeric(units::set_units(elevation, "ft")) } else { elevation <- as.numeric(elevation) } - locations$elevation <- round(elevation, 2) + + locations[[elev_col]] <- round(elevation, 2) location_list <- list(locations, units) location_list } - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/R/get_elev_raster.R b/R/get_elev_raster.R index 2ae7ca4..378fcc6 100644 --- a/R/get_elev_raster.R +++ b/R/get_elev_raster.R @@ -1,116 +1,118 @@ #' Get Raster Elevation -#' -#' Several web services provide access to raster elevation. Currently, this -#' function provides access to the Amazon Web Services Terrain Tiles and the -#' Open Topography global datasets API. The function accepts a \code{data.frame} -#' of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A +#' +#' Several web services provide access to raster elevation. Currently, this +#' function provides access to the Amazon Web Services Terrain Tiles and the +#' Open Topography global datasets API. The function accepts a \code{data.frame} +#' of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A #' \code{RasterLayer} object is returned. In subsequent versions, a \code{SpatRaster} #' will be returned. -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an -#' \code{sf}, or \code{terra} object as input. +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' \code{sf}, or \code{terra} object as input. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at +#' of the resultant raster is determined by the zoom and latitude. For +#' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. -#' The z is not required for the OpenTopography data sources. -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This +#' The z is not required for the OpenTopography data sources. +#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} +#' object or a \code{terra} object is provided as the \code{locations}, +#' the prj is optional and will be taken from \code{locations}. This #' argument is required for a \code{data.frame} of locations. -#' @param src A character indicating which API to use. 
Currently supports "aws" -#' and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global +#' @param src A character indicating which API to use. Currently supports "aws" +#' and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global #' datasets. "aws" is the default. #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the terrain tiles. This can -#' be used for features that fall close to the edge of a tile or -#' for retrieving additional area around the feature. If the -#' feature is a single point, the area it returns will be small if +#' bounding box that is used to fetch the terrain tiles. This can +#' be used for features that fall close to the edge of a tile or +#' for retrieving additional area around the feature. If the +#' feature is a single point, the area it returns will be small if #' clip is set to "bbox". Default is NULL. -#' @param clip A character value used to determine clipping of returned DEM. -#' The default value is "tile" which returns the full tiles. Other -#' options are "bbox" which returns the DEM clipped to the bounding -#' box of the original locations (or expanded bounding box if used), -#' or "locations" if the spatial data (e.g. polygons) in the input -#' locations should be used to clip the DEM. Locations are not used +#' @param clip A character value used to determine clipping of returned DEM. +#' The default value is "tile" which returns the full tiles. Other +#' options are "bbox" which returns the DEM clipped to the bounding +#' box of the original locations (or expanded bounding box if used), +#' or "locations" if the spatial data (e.g. polygons) in the input +#' locations should be used to clip the DEM. Locations are not used #' to clip input point datasets. Instead the bounding box is used. -#' @param verbose Toggles on and off the note about units and coordinate +#' @param verbose Toggles on and off the note about units and coordinate #' reference system. -#' @param neg_to_na Some of the data sources return large negative numbers as -#' missing data. When the end result is a projected those -#' large negative numbers can vary. When set to TRUE, only +#' @param neg_to_na Some of the data sources return large negative numbers as +#' missing data. When the end result is a projected those +#' large negative numbers can vary. When set to TRUE, only #' zero and positive values are returned. Default is FALSE. -#' @param override_size_check Boolean to override size checks. Any download +#' @param override_size_check Boolean to override size checks. Any download #' between 100 Mb and 500Mb report a message but -#' continue. Between 500Mb and 3000Mb requires +#' continue. Between 500Mb and 3000Mb requires #' interaction and greater than 3000Mb fails. These -#' can be overriden with this argument set to TRUE. -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not -#' created by \code{get_elev_raster}. -#' @param ... Extra arguments to pass to \code{httr::GET} via a named vector, +#' can be overriden with this argument set to TRUE. +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not +#' created by \code{get_elev_raster}. +#' @param ... 
Extra arguments to pass to \code{httr::GET} via a named vector, #' \code{config}. See -#' \code{\link{get_aws_terrain}} for more details. -#' @return Function returns a \code{RasterLayer} in the projection -#' specified by the \code{prj} argument or in the projection of the +#' \code{\link{get_aws_terrain}} for more details. +#' @return Function returns a \code{RasterLayer} in the projection +#' specified by the \code{prj} argument or in the projection of the #' provided locations. In subsequent versions, a \code{SpatRaster} #' will be returned. -#' @details Currently, the \code{get_elev_raster} function utilizes the -#' Amazon Web Services -#' (\url{https://registry.opendata.aws/terrain-tiles/}) terrain -#' tiles and the Open Topography Global Datasets API -#' (\url{https://opentopography.org/developers}). -#' -#' The AWS Terrain Tiles data is provided via x, y, and z tiles (see -#' \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for -#' details.) The x and y are determined from the bounding box of the -#' object submitted for \code{locations} argument, and the z argument -#' must be specified by the user. +#' @details Currently, the \code{get_elev_raster} function utilizes the +#' Amazon Web Services +#' (\url{https://registry.opendata.aws/terrain-tiles/}) terrain +#' tiles and the Open Topography Global Datasets API +#' (\url{https://opentopography.org/developers}). +#' +#' The AWS Terrain Tiles data is provided via x, y, and z tiles (see +#' \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for +#' details.) The x and y are determined from the bounding box of the +#' object submitted for \code{locations} argument, and the z argument +#' must be specified by the user. #' @export -#' @examples +#' @examples #' \dontrun{ #' library(elevatr) #' library(sf) #' data(lake) #' lake_buff <- st_buffer(lake, 1000) -#' loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, +#' loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, #' max=sf::st_bbox(lake)$xmax), -#' y = runif(6,min=sf::st_bbox(lake)$ymin, +#' y = runif(6,min=sf::st_bbox(lake)$ymin, #' max=sf::st_bbox(lake)$ymax)) -#' +#' #' x <- get_elev_raster(locations = loc_df, prj = st_crs(lake) , z=10) #' x <- get_elev_raster(lake, z = 14) #' x <- get_elev_raster(lake, src = "gl3", expand = 5000) #' x <- get_elev_raster(lake_buff, z = 10, clip = "locations") #' } -get_elev_raster <- function(locations, z, prj = NULL, +get_elev_raster <- function(locations, z, prj = NULL, src = c("aws", "gl3", "gl1", "alos", "srtm15plus"), - expand = NULL, clip = c("tile", "bbox", "locations"), - verbose = TRUE, neg_to_na = FALSE, - override_size_check = FALSE, tmp_dir = tempdir(), ...){ + expand = NULL, clip = c("tile", "bbox", "locations"), + verbose = TRUE, neg_to_na = FALSE, + override_size_check = FALSE,tmp_dir = tempdir(), + coords = c("x", "y"), + ...){ # First Check for internet if(!curl::has_internet()) { message("Please connect to the internet and try again.") return(NULL) } - + tmp_dir <- normalizePath(tmp_dir, mustWork = TRUE) src <- match.arg(src) - clip <- match.arg(clip) - + clip <- match.arg(clip) + # Check location type and if sf, set prj. If no prj (for either) then error - locations <- loc_check(locations,prj) - - if(is.null(prj)){ + locations <- loc_check(locations, prj, coords = coords) + + if (is.null(prj)) { prj <- sf::st_crs(locations) } #need to check what is going on with PRJ when no prj passed. # Check download size and provide feedback, stop if too big! 
dl_size <- estimate_raster_size(locations, prj, src, z) if(dl_size > 500 & dl_size < 1000){ - message(paste0("Note: Your request will download approximately ", + message(paste0("Note: Your request will download approximately ", round(dl_size, 1), "Mb.")) } else if(dl_size > 1000 & dl_size <= 3000){ message(paste0("Your request will download approximately ", @@ -121,110 +123,110 @@ get_elev_raster <- function(locations, z, prj = NULL, } } else if(!override_size_check & dl_size > 3000){ stop(paste0("Your request will download approximately ", - round(dl_size, 1), "Mb. That's probably too big. If you + round(dl_size, 1), "Mb. That's probably too big. If you really want to do this, set override_size_check = TRUE. Note that the OpenTopography API Limit will likely be exceeded.")) } - - + + # Pass of locations to APIs to get data as raster if(src == "aws") { - raster_elev <- get_aws_terrain(locations, z, prj = prj, expand = expand, + raster_elev <- get_aws_terrain(locations, z, prj = prj, expand = expand, tmp_dir = tmp_dir, ...) } else if(src %in% c("gl3", "gl1", "alos", "srtm15plus")){ - raster_elev <- get_opentopo(locations, src, prj = prj, expand = expand, + raster_elev <- get_opentopo(locations, src, prj = prj, expand = expand, tmp_dir = tmp_dir, ...) } sources <- attr(raster_elev, "sources") if(is.null(sources)){sources <- src} - + if(clip != "tile"){ message(paste("Clipping DEM to", clip)) - + raster_elev <- clip_it(raster_elev, locations, expand, clip) } - - if(verbose){ + + if (verbose){ message(paste("Note: Elevation units are in meters.")) } - - - if(neg_to_na){ + + + if (neg_to_na){ raster_elev[raster_elev < 0] <- NA } - + attr(raster_elev, "sources") <- sources #Returning raster for now #Switch to SpatRaster in near future. raster::raster(raster_elev) - + } #' Get a digital elevation model from the AWS Terrain Tiles -#' +#' #' This function uses the AWS Terrain Tile service to retrieve an elevation -#' raster from the geotiff service. It accepts a \code{sf::st_bbox} object as -#' input and returns a single raster object covering that extent. -#' -#' @source Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 -#' content courtesy of the U.S. Geological Survey and ETOPO1 content -#' courtesy of U.S. National Oceanic and Atmospheric Administration. -#' \url{https://github.com/tilezen/joerd/tree/master/docs} -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' raster from the geotiff service. It accepts a \code{sf::st_bbox} object as +#' input and returns a single raster object covering that extent. +#' +#' @source Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 +#' content courtesy of the U.S. Geological Survey and ETOPO1 content +#' courtesy of U.S. National Oceanic and Atmospheric Administration. +#' \url{https://github.com/tilezen/joerd/tree/master/docs} +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an #' \code{sp}, \code{sf}, or \code{raster} object as input. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at +#' of the resultant raster is determined by the zoom and latitude. 
For +#' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This +#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} +#' object or a \code{terra} object is provided as the \code{locations}, +#' the prj is optional and will be taken from \code{locations}. This #' argument is required for a \code{data.frame} of locations. #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the terrain tiles. This can -#' be used for features that fall close to the edge of a tile and +#' bounding box that is used to fetch the terrain tiles. This can +#' be used for features that fall close to the edge of a tile and #' additional area around the feature is desired. Default is NULL. #' @param ncpu Number of CPU's to use when downloading aws tiles. -#' @param serial Logical to determine if API should be hit in serial or in -#' parallel. TRUE will use purrr, FALSE will use furrr. -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not +#' @param serial Logical to determine if API should be hit in serial or in +#' parallel. TRUE will use purrr, FALSE will use furrr. +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. -#' @param ... Extra configuration parameters to be passed to httr::GET. Common -#' usage is to adjust timeout. This is done as -#' \code{config=timeout(x)} where \code{x} is a numeric value in -#' seconds. Multiple configuration functions may be passed as a -#' vector. +#' @param ... Extra configuration parameters to be passed to httr::GET. Common +#' usage is to adjust timeout. This is done as +#' \code{config=timeout(x)} where \code{x} is a numeric value in +#' seconds. Multiple configuration functions may be passed as a +#' vector. 
#' @export #' @importFrom progressr handlers progressor with_progress #' @keywords internal -get_aws_terrain <- function(locations, z, prj, expand=NULL, +get_aws_terrain <- function(locations, z, prj, expand=NULL, ncpu = future::availableCores() - 1, serial = NULL, tmp_dir = tempdir(), ...){ # Expand (if needed) and re-project bbx to dd - + bbx <- proj_expand(locations,prj,expand) - + base_url <- "https://s3.amazonaws.com/elevation-tiles-prod/geotiff" - - + + tiles <- get_tilexy(bbx,z) - + urls <- sprintf("%s/%s/%s/%s.tif", base_url, z, tiles[,1], tiles[,2]) - + for(i in urls){ if(httr::http_error(i)) { message("An AWS URL is invalid.") return(NULL) } } - - + + dir <- tempdir() - + nurls <- length(urls) if(is.null(serial)){ if(nurls < 175){ @@ -233,31 +235,31 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, serial <- FALSE } } - + progressr::handlers( progressr::handler_progress( format = " Accessing raster elevation [:bar] :percent", - clear = FALSE, + clear = FALSE, width= 60 )) - + progressr::with_progress({ if(serial){ - + p <- progressr::progressor(along = urls) dem_list <- purrr::map(urls, function(x){ p() - tmpfile <- tempfile(tmpdir = tmp_dir, + tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - resp <- httr::GET(x, + resp <- httr::GET(x, httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. = FALSE) - } + } tmpfile2 <- tmpfile - attr(tmpfile2, "source") <- + attr(tmpfile2, "source") <- httr::headers(resp)$'x-amz-meta-x-imagery-sources' tmpfile2 }) @@ -268,20 +270,20 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, function(x){ p() tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - resp <- httr::GET(x, + resp <- httr::GET(x, httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. = FALSE) - } + } tmpfile2 <- tmpfile - attr(tmpfile2, "source") <- + attr(tmpfile2, "source") <- httr::headers(resp)$'x-amz-meta-x-imagery-sources' tmpfile2 }) } }) - + merged_elevation_grid <- merge_rasters(dem_list, target_prj = prj, tmp_dir = tmp_dir) sources <- unlist(lapply(dem_list, function(x) attr(x, "source"))) if(!is.null(sources)){ @@ -289,59 +291,59 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, sources <- strsplit(sources, "/") sources <- unlist(unique(lapply(sources, function(x) x[1]))) } - attr(merged_elevation_grid, "sources") <- + attr(merged_elevation_grid, "sources") <- paste(sources, collapse = ",") - + if(serial==FALSE){future::plan(future::sequential)} - - merged_elevation_grid + + merged_elevation_grid } #' Merge Rasters -#' -#' Merge multiple downloaded raster files into a single file. The input `target_prj` +#' +#' Merge multiple downloaded raster files into a single file. The input `target_prj` #' describes the projection for the new grid. -#' +#' #' @param raster_list a list of raster file paths to be mosaiced #' @param target_prj the target projection of the output raster -#' @param method the method for resampling/reprojecting. Default is 'bilinear'. +#' @param method the method for resampling/reprojecting. Default is 'bilinear'. 
#' Options can be found [here](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r) #' @param returnRaster if TRUE, return a raster object (default), else, return the file path to the object -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. #' @export #' @keywords internal - -merge_rasters <- function(raster_list, target_prj, method = "bilinear", + +merge_rasters <- function(raster_list, target_prj, method = "bilinear", returnRaster = TRUE, tmp_dir = tempdir()){ - + message(paste("Mosaicing & Projecting")) - + destfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") files <- unlist(raster_list) - + if(is.null(target_prj)){ r <- terra::rast(files[1]) target_prj <- terra::crs(r) } - - sf::gdal_utils(util = "warp", - source = files, + + sf::gdal_utils(util = "warp", + source = files, destination = destfile, options = c("-r", method) ) # Using two steps now as gdal with one step introduced NA's along seams # Slower but more accurate! destfile2 <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - sf::gdal_utils(util = "warp", - source = destfile, + sf::gdal_utils(util = "warp", + source = destfile, destination = destfile2, options = c("-r", method, "-t_srs", sf::st_crs(target_prj)$wkt) ) - + if(returnRaster){ terra::rast(destfile2) } else { @@ -350,38 +352,38 @@ merge_rasters <- function(raster_list, target_prj, method = "bilinear", } #' Get a digital elevation model from the Open Topography SRTM Version 3 -#' -#' This function uses the Open Topography SRTM Version 3 files. -#' +#' +#' This function uses the Open Topography SRTM Version 3 files. +#' #' @source Attribution: Details here -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an -#' \code{sp}, an \code{sf}, or \code{raster} object as input. -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' \code{sp}, an \code{sf}, or \code{raster} object as input. +#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} +#' object or a \code{terra} object is provided as the \code{locations}, +#' the prj is optional and will be taken from \code{locations}. This #' argument is required for a \code{data.frame} of locations. #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the SRTM data. -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not +#' bounding box that is used to fetch the SRTM data. +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. -#' @param ... Extra configuration parameters to be passed to httr::GET. Common -#' usage is to adjust timeout. 
This is done as -#' \code{config=timeout(x)} where \code{x} is a numeric value in -#' seconds. Multiple configuration functions may be passed as a -#' vector. +#' @param ... Extra configuration parameters to be passed to httr::GET. Common +#' usage is to adjust timeout. This is done as +#' \code{config=timeout(x)} where \code{x} is a numeric value in +#' seconds. Multiple configuration functions may be passed as a +#' vector. #' @export #' @keywords internal get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), ...){ - + api_key <- get_opentopo_key() - + # Expand (if needed) and re-project bbx to ll_geo bbx <- proj_expand(locations,prj,expand) - + tmpfile <- tempfile(tmpdir = tmp_dir) base_url <- "https://portal.opentopography.org/API/globaldem?demtype=" data_set <- switch(src, @@ -389,7 +391,7 @@ get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), gl1 = "SRTMGL1", alos = "AW3D30", srtm15plus = "SRTM15Plus") - + url <- paste0(base_url, data_set, "&west=",min(bbx["xmin"]), "&south=",min(bbx["ymin"]), @@ -397,43 +399,43 @@ get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), "&north=",max(bbx["ymax"]), "&outputFormat=GTiff", "&API_Key=", api_key) - + if(httr::http_error(url)) { message("The OpenTopography URL is invalid.") return(NULL) } - + message("Downloading OpenTopography DEMs") - resp <- httr::GET(url,httr::write_disk(tmpfile,overwrite=TRUE), + resp <- httr::GET(url,httr::write_disk(tmpfile,overwrite=TRUE), httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), httr::progress(), ...) message("") if (httr::http_type(resp) != "application/octet-stream") { stop("API did not return octet-stream as expected", call. = FALSE) - } + } dem <- merge_rasters(tmpfile, target_prj = prj, tmp_dir = tmp_dir) dem } #' Store OpenTopography Key -#' +#' #' This function stores an OpenTopgrapy key in a local .Renviron file. If the -#' .Renviron file exists, the key will be appended. This will typically only -#' need to be done once per machine. -#' -#' -#' @param key An OpenTopography API Key as a character. For details on obtaining an -#' OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}. +#' .Renviron file exists, the key will be appended. This will typically only +#' need to be done once per machine. +#' +#' +#' @param key An OpenTopography API Key as a character. For details on obtaining an +#' OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}. #' @export set_opentopo_key <- function(key){ home <- normalizePath("~/") - if(Sys.getenv("OPENTOPO_KEY")==""){ - cat(paste0("OPENTOPO_KEY=", key, "\n"), file = paste0(home, "/.Renviron"), + if(Sys.getenv("OPENTOPO_KEY")==""){ + cat(paste0("OPENTOPO_KEY=", key, "\n"), file = paste0(home, "/.Renviron"), append = TRUE) message("Your OpenTopography Key has been added to .Renviron. You will need to restart R for the changes to take effect.") } else { message("An existing OpenTopography Key already exists. 
To edit try usethis::edit_r_environ().") - } -} \ No newline at end of file + } +} From 259cfece346cc2df4473d90f1963a478da411adc Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:34:51 -0400 Subject: [PATCH 04/23] Update docs - Update docs based on cleaned up whitespace - Add optional support for markdown formatted Roxygen --- DESCRIPTION | 3 +- man/get_aws_points.Rd | 25 +++++----- man/get_aws_terrain.Rd | 42 ++++++++--------- man/get_elev_point.Rd | 68 ++++++++++++++------------- man/get_elev_raster.Rd | 100 ++++++++++++++++++++-------------------- man/get_epqs.Rd | 15 +++--- man/get_opentopo.Rd | 22 ++++----- man/lake.Rd | 2 +- man/merge_rasters.Rd | 12 ++--- man/set_opentopo_key.Rd | 4 +- man/sf_big.Rd | 2 +- 11 files changed, 152 insertions(+), 143 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index 38fd32c..963a066 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -40,7 +40,7 @@ Imports: License: MIT + file LICENSE Encoding: UTF-8 LazyData: true -RoxygenNote: 7.2.3 +RoxygenNote: 7.3.2 Suggests: testthat, knitr, @@ -48,3 +48,4 @@ Suggests: formatR, progress VignetteBuilder: knitr +Roxygen: list(markdown = TRUE) diff --git a/man/get_aws_points.Rd b/man/get_aws_points.Rd index 601587c..3778347 100644 --- a/man/get_aws_points.Rd +++ b/man/get_aws_points.Rd @@ -9,25 +9,26 @@ get_aws_points( z = 5, units = c("meters", "feet"), verbose = TRUE, + elev_col = "elevation", ... ) } \arguments{ -\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the -first column and y (e.g. latitude) as the second column, a -\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -\code{sf} \code{POINT} or \code{MULTIPOINT} object. -Elevation for these points will be returned in the +\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the +first column and y (e.g. latitude) as the second column, a +\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +\code{sf} \code{POINT} or \code{MULTIPOINT} object. +Elevation for these points will be returned in the originally supplied class.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at -\url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at +\url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. default value is 5 is supplied.} -\item{units}{Character string of either meters or feet. Conversions for -'aws' are handled in R as the AWS terrain tiles are served in +\item{units}{Character string of either meters or feet. 
Conversions for +'aws' are handled in R as the AWS terrain tiles are served in meters.} \item{verbose}{Report back messages.} @@ -35,8 +36,8 @@ meters.} \item{...}{Arguments to be passed to \code{get_elev_raster}} } \value{ -a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with - elevation added to the data slot and a character of the elevation units +a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +elevation added to the data slot and a character of the elevation units } \description{ Function for accessing elevation data from AWS and extracting the elevations diff --git a/man/get_aws_terrain.Rd b/man/get_aws_terrain.Rd index fc8e004..9fcfe0b 100644 --- a/man/get_aws_terrain.Rd +++ b/man/get_aws_terrain.Rd @@ -4,10 +4,10 @@ \alias{get_aws_terrain} \title{Get a digital elevation model from the AWS Terrain Tiles} \source{ -Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 - content courtesy of the U.S. Geological Survey and ETOPO1 content - courtesy of U.S. National Oceanic and Atmospheric Administration. - \url{https://github.com/tilezen/joerd/tree/master/docs} +Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 +content courtesy of the U.S. Geological Survey and ETOPO1 content +courtesy of U.S. National Oceanic and Atmospheric Administration. +\url{https://github.com/tilezen/joerd/tree/master/docs} } \usage{ get_aws_terrain( @@ -22,43 +22,43 @@ get_aws_terrain( ) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sp}, \code{sf}, or \code{raster} object as input.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} \item{expand}{A numeric value of a distance, in map units, used to expand the -bounding box that is used to fetch the terrain tiles. This can -be used for features that fall close to the edge of a tile and +bounding box that is used to fetch the terrain tiles. This can +be used for features that fall close to the edge of a tile and additional area around the feature is desired. Default is NULL.} \item{ncpu}{Number of CPU's to use when downloading aws tiles.} -\item{serial}{Logical to determine if API should be hit in serial or in +\item{serial}{Logical to determine if API should be hit in serial or in parallel. TRUE will use purrr, FALSE will use furrr.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. 
Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{...}{Extra configuration parameters to be passed to httr::GET. Common -usage is to adjust timeout. This is done as -\code{config=timeout(x)} where \code{x} is a numeric value in -seconds. Multiple configuration functions may be passed as a +\item{...}{Extra configuration parameters to be passed to httr::GET. Common +usage is to adjust timeout. This is done as +\code{config=timeout(x)} where \code{x} is a numeric value in +seconds. Multiple configuration functions may be passed as a vector.} } \description{ This function uses the AWS Terrain Tile service to retrieve an elevation -raster from the geotiff service. It accepts a \code{sf::st_bbox} object as +raster from the geotiff service. It accepts a \code{sf::st_bbox} object as input and returns a single raster object covering that extent. } \keyword{internal} diff --git a/man/get_elev_point.Rd b/man/get_elev_point.Rd index e063b4d..8a12f13 100644 --- a/man/get_elev_point.Rd +++ b/man/get_elev_point.Rd @@ -8,52 +8,56 @@ get_elev_point( locations, prj = NULL, src = c("epqs", "aws"), + coords = c("x", "y"), overwrite = FALSE, - ... + ..., + units = c("meters", "feet"), + elev_col = "elevation", + elev_units_col = "elev_units" ) } \arguments{ -\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the -first column and y (e.g. latitude) as the second column, a -\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -\code{sf} \code{POINT} or \code{MULTIPOINT} object. -Elevation for these points will be returned in the +\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the +first column and y (e.g. latitude) as the second column, a +\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +\code{sf} \code{POINT} or \code{MULTIPOINT} object. +Elevation for these points will be returned in the originally supplied class.} -\item{prj}{A valid input to \code{\link{st_crs}}. This +\item{prj}{A valid input to \code{\link{st_crs}}. This argument is required for a \code{data.frame} of locations and optional for \code{sf} locations.} -\item{src}{A character indicating which API to use, either "epqs" or "aws" -accepted. The "epqs" source is relatively slow for larger numbers -of points (e.g. > 500). The "aws" source may be quicker in these -cases provided the points are in a similar geographic area. The +\item{src}{A character indicating which API to use, either "epqs" or "aws" +accepted. The "epqs" source is relatively slow for larger numbers +of points (e.g. > 500). The "aws" source may be quicker in these +cases provided the points are in a similar geographic area. The "aws" source downloads a DEM using \code{get_elev_raster} and then extracts the elevation for each point.} -\item{overwrite}{A logical indicating that existing \code{elevation} and -\code{elev_units} columns should be overwritten. Default is -FALSE and \code{get_elev_point} will error if these columns +\item{overwrite}{A logical indicating that existing \code{elevation} and +\code{elev_units} columns should be overwritten. Default is +FALSE and \code{get_elev_point} will error if these columns already exist.} -\item{...}{Additional arguments passed to get_epqs or get_aws_points. When -using "aws" as the source, pay attention to the `z` argument. A -defualt of 5 is used, but this uses a raster with a large ~4-5 km -pixel. 
Additionally, the source data changes as zoom levels -increase. -Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +\item{...}{Additional arguments passed to get_epqs or get_aws_points. When +using "aws" as the source, pay attention to the \code{z} argument. A +default of 5 is used, but this uses a raster with a large ~4-5 km +pixel. Additionally, the source data changes as zoom levels +increase. +Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} for details.} } \value{ -Function returns an \code{sf} object in the projection specified by - the \code{prj} argument. +Function returns an \code{sf} object in the projection specified by +the \code{prj} argument. } \description{ -This function provides access to point elevations using either the USGS -Elevation Point Query Service (US Only) or by extracting point elevations -from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x -(long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as -input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with +This function provides access to point elevations using either the USGS +Elevation Point Query Service (US Only) or by extracting point elevations +from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x +(long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as +input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with elevation and elevation units as an added \code{data.frame}. } \examples{ @@ -62,8 +66,8 @@ library(elevatr) library(sf) library(terra) -mts <- data.frame(x = c(-71.3036, -72.8145), - y = c(44.2700, 44.5438), +mts <- data.frame(x = c(-71.3036, -72.8145), + y = c(44.2700, 44.5438), names = c("Mt. Washington", "Mt. Mansfield")) ll_prj <- 4326 mts_sf <- st_as_sf(x = mts, coords = c("x", "y"), crs = ll_prj) @@ -80,13 +84,13 @@ get_elev_point(locations = mts_raster_loc) # Code to split into a loop and grab point at a time. -# This is usually faster for points that are spread apart - +# This is usually faster for points that are spread apart + library(dplyr) elev <- vector("numeric", length = nrow(mts)) for(i in seq_along(mts)){ -elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", +elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", z = 10)$elevation} mts_elev <- cbind(mts, elev) mts_elev diff --git a/man/get_elev_raster.Rd b/man/get_elev_raster.Rd index 7d69cd0..0614176 100644 --- a/man/get_elev_raster.Rd +++ b/man/get_elev_raster.Rd @@ -15,92 +15,94 @@ get_elev_raster( neg_to_na = FALSE, override_size_check = FALSE, tmp_dir = tempdir(), + coords = c("x", "y"), ... ) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sf}, or \code{terra} object as input.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. 
The z is not required for the OpenTopography data sources.} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} -\item{src}{A character indicating which API to use. Currently supports "aws" -and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global +\item{src}{A character indicating which API to use. Currently supports "aws" +and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global datasets. "aws" is the default.} \item{expand}{A numeric value of a distance, in map units, used to expand the -bounding box that is used to fetch the terrain tiles. This can -be used for features that fall close to the edge of a tile or -for retrieving additional area around the feature. If the -feature is a single point, the area it returns will be small if +bounding box that is used to fetch the terrain tiles. This can +be used for features that fall close to the edge of a tile or +for retrieving additional area around the feature. If the +feature is a single point, the area it returns will be small if clip is set to "bbox". Default is NULL.} \item{clip}{A character value used to determine clipping of returned DEM. -The default value is "tile" which returns the full tiles. Other -options are "bbox" which returns the DEM clipped to the bounding -box of the original locations (or expanded bounding box if used), -or "locations" if the spatial data (e.g. polygons) in the input -locations should be used to clip the DEM. Locations are not used +The default value is "tile" which returns the full tiles. Other +options are "bbox" which returns the DEM clipped to the bounding +box of the original locations (or expanded bounding box if used), +or "locations" if the spatial data (e.g. polygons) in the input +locations should be used to clip the DEM. Locations are not used to clip input point datasets. Instead the bounding box is used.} -\item{verbose}{Toggles on and off the note about units and coordinate +\item{verbose}{Toggles on and off the note about units and coordinate reference system.} -\item{neg_to_na}{Some of the data sources return large negative numbers as -missing data. When the end result is a projected those -large negative numbers can vary. When set to TRUE, only +\item{neg_to_na}{Some of the data sources return large negative numbers as +missing data. When the end result is a projected those +large negative numbers can vary. When set to TRUE, only zero and positive values are returned. Default is FALSE.} -\item{override_size_check}{Boolean to override size checks. Any download +\item{override_size_check}{Boolean to override size checks. Any download between 100 Mb and 500Mb report a message but -continue. Between 500Mb and 3000Mb requires +continue. Between 500Mb and 3000Mb requires interaction and greater than 3000Mb fails. These can be overriden with this argument set to TRUE.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. 
Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, +\item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, \code{config}. See \code{\link{get_aws_terrain}} for more details.} } \value{ -Function returns a \code{RasterLayer} in the projection - specified by the \code{prj} argument or in the projection of the - provided locations. In subsequent versions, a \code{SpatRaster} - will be returned. +Function returns a \code{RasterLayer} in the projection +specified by the \code{prj} argument or in the projection of the +provided locations. In subsequent versions, a \code{SpatRaster} +will be returned. } \description{ -Several web services provide access to raster elevation. Currently, this -function provides access to the Amazon Web Services Terrain Tiles and the -Open Topography global datasets API. The function accepts a \code{data.frame} -of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A +Several web services provide access to raster elevation. Currently, this +function provides access to the Amazon Web Services Terrain Tiles and the +Open Topography global datasets API. The function accepts a \code{data.frame} +of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A \code{RasterLayer} object is returned. In subsequent versions, a \code{SpatRaster} will be returned. } \details{ -Currently, the \code{get_elev_raster} function utilizes the - Amazon Web Services - (\url{https://registry.opendata.aws/terrain-tiles/}) terrain - tiles and the Open Topography Global Datasets API - (\url{https://opentopography.org/developers}). - - The AWS Terrain Tiles data is provided via x, y, and z tiles (see - \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for - details.) The x and y are determined from the bounding box of the - object submitted for \code{locations} argument, and the z argument - must be specified by the user. +Currently, the \code{get_elev_raster} function utilizes the +Amazon Web Services +(\url{https://registry.opendata.aws/terrain-tiles/}) terrain +tiles and the Open Topography Global Datasets API +(\url{https://opentopography.org/developers}). + +\if{html}{\out{
}}\preformatted{ The AWS Terrain Tiles data is provided via x, y, and z tiles (see + \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for + details.) The x and y are determined from the bounding box of the + object submitted for \code{locations} argument, and the z argument + must be specified by the user. +}\if{html}{\out{
}} } \examples{ \dontrun{ @@ -108,11 +110,11 @@ library(elevatr) library(sf) data(lake) lake_buff <- st_buffer(lake, 1000) -loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, +loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, max=sf::st_bbox(lake)$xmax), - y = runif(6,min=sf::st_bbox(lake)$ymin, + y = runif(6,min=sf::st_bbox(lake)$ymin, max=sf::st_bbox(lake)$ymax)) - + x <- get_elev_raster(locations = loc_df, prj = st_crs(lake) , z=10) x <- get_elev_raster(lake, z = 14) x <- get_elev_raster(lake, src = "gl3", expand = 5000) diff --git a/man/get_epqs.Rd b/man/get_epqs.Rd index a1f9a22..c1d8573 100644 --- a/man/get_epqs.Rd +++ b/man/get_epqs.Rd @@ -8,25 +8,26 @@ get_epqs( locations, units = c("meters", "feet"), ncpu = future::availableCores() - 1, - serial = NULL + serial = NULL, + elev_col = "elevation" ) } \arguments{ -\item{locations}{A SpatialPointsDataFrame of the location(s) for which you -wish to return elevation. The first column is Longitude and +\item{locations}{A SpatialPointsDataFrame of the location(s) for which you +wish to return elevation. The first column is Longitude and the second column is Latitude.} -\item{units}{Character string of either meters or feet. Conversions for +\item{units}{Character string of either meters or feet. Conversions for 'epqs' are handled by the API itself.} \item{ncpu}{Number of CPU's to use when downloading epqs data.} -\item{serial}{Logical to determine if API should be hit in serial or in +\item{serial}{Logical to determine if API should be hit in serial or in parallel. TRUE will use purrr, FALSE will use furrr.} } \value{ -a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with - elevation added to the data slot and a character of the elevation units +a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +elevation added to the data slot and a character of the elevation units } \description{ Function for accessing elevation data from the USGS epqs diff --git a/man/get_opentopo.Rd b/man/get_opentopo.Rd index cb221ff..65c30f6 100644 --- a/man/get_opentopo.Rd +++ b/man/get_opentopo.Rd @@ -10,26 +10,26 @@ Attribution: Details here get_opentopo(locations, src, prj, expand = NULL, tmp_dir = tempdir(), ...) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sp}, an \code{sf}, or \code{raster} object as input.} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} \item{expand}{A numeric value of a distance, in map units, used to expand the bounding box that is used to fetch the SRTM data.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{...}{Extra configuration parameters to be passed to httr::GET. 
Common -usage is to adjust timeout. This is done as -\code{config=timeout(x)} where \code{x} is a numeric value in -seconds. Multiple configuration functions may be passed as a +\item{...}{Extra configuration parameters to be passed to httr::GET. Common +usage is to adjust timeout. This is done as +\code{config=timeout(x)} where \code{x} is a numeric value in +seconds. Multiple configuration functions may be passed as a vector.} } \description{ diff --git a/man/lake.Rd b/man/lake.Rd index 211b3ad..3630578 100644 --- a/man/lake.Rd +++ b/man/lake.Rd @@ -8,7 +8,7 @@ SpatialPolygonDataframe with 1 lakes, each with 13 variables } \description{ -This example data is a SpatialPolygonsDataFrame +This example data is a SpatialPolygonsDataFrame of a single lake, Lake Sunapee. Used for examples and tests. } \keyword{datasets} diff --git a/man/merge_rasters.Rd b/man/merge_rasters.Rd index 688d418..b7c2f01 100644 --- a/man/merge_rasters.Rd +++ b/man/merge_rasters.Rd @@ -17,18 +17,18 @@ merge_rasters( \item{target_prj}{the target projection of the output raster} -\item{method}{the method for resampling/reprojecting. Default is 'bilinear'. -Options can be found [here](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r)} +\item{method}{the method for resampling/reprojecting. Default is 'bilinear'. +Options can be found \href{https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r}{here}} \item{returnRaster}{if TRUE, return a raster object (default), else, return the file path to the object} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} } \description{ -Merge multiple downloaded raster files into a single file. The input `target_prj` +Merge multiple downloaded raster files into a single file. The input \code{target_prj} describes the projection for the new grid. } \keyword{internal} diff --git a/man/set_opentopo_key.Rd b/man/set_opentopo_key.Rd index d6fe684..d4a7abe 100644 --- a/man/set_opentopo_key.Rd +++ b/man/set_opentopo_key.Rd @@ -7,11 +7,11 @@ set_opentopo_key(key) } \arguments{ -\item{key}{An OpenTopography API Key as a character. For details on obtaining an +\item{key}{An OpenTopography API Key as a character. For details on obtaining an OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}.} } \description{ This function stores an OpenTopgrapy key in a local .Renviron file. If the -.Renviron file exists, the key will be appended. This will typically only +.Renviron file exists, the key will be appended. This will typically only need to be done once per machine. 
} diff --git a/man/sf_big.Rd b/man/sf_big.Rd index 375d13a..433d77a 100644 --- a/man/sf_big.Rd +++ b/man/sf_big.Rd @@ -8,7 +8,7 @@ A sf POINT object } \description{ -This sf POINT dataset is 250 uniform random points to be used for +This sf POINT dataset is 250 uniform random points to be used for examples and tests } \keyword{datasets} From 978953d6a4accfbe891953435703ddb4c1c49376 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:36:41 -0400 Subject: [PATCH 05/23] Add loc_linestring_to_point helper Also cleans up whitespace automatically --- R/internal.R | 80 ++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 55 insertions(+), 25 deletions(-) diff --git a/R/internal.R b/R/internal.R index 8ae23fa..099c34d 100644 --- a/R/internal.R +++ b/R/internal.R @@ -21,8 +21,8 @@ get_tilexy <- function(bbx,z){ max_tile <- unlist(slippymath::lonlat_to_tilenum(bbx["xmax"],bbx["ymax"],z)) x_all <- seq(from = floor(min_tile[1]), to = floor(max_tile[1])) y_all <- seq(from = floor(min_tile[2]), to = floor(max_tile[2])) - - + + if(z == 1){ x_all <- x_all[x_all<2] y_all <- y_all[y_all<2] @@ -30,11 +30,41 @@ get_tilexy <- function(bbx,z){ x_all <- x_all[x_all<1] y_all <- y_all[y_all<1] } - - + + return(expand.grid(x_all,y_all)) } +#' function sf, sfc, or sfg with LINESTRING geometry to POINT geometry +#' +#' Optionally use [sf::st_line_sample()] when n, density, or sample are +#' supplied. +#' +#' @param locations A sf, sfc, or sfg object with LINESTRING geometry. +#' @inheritParams sf::st_line_sample +#' @keywords internal +loc_linestring_to_point <- function( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL) { + stopifnot( + sf::st_is(locations, "LINESTRING") + ) + + if (is.numeric(c(n, density, sample))) { + locations <- sf::st_line_sample( + locations, + n = n, density = density, + sample = sample, + type = type + ) + } + + sf::st_cast(locations, to = "POINT") +} + #' Get length of vector or nrow for data frame input #' @noRd loc_length <- function(locations) { @@ -108,7 +138,7 @@ loc_check <- function(locations, #' function to project bounding box and if needed expand it #' @keywords internal proj_expand <- function(locations,prj,expand){ - + lll <- sf::st_is_longlat(prj) #any(grepl("\\bGEOGCRS\\b",sf::st_crs(prj)) | # grepl("\\bGEODCRS\\b", sf::st_crs(prj)) | @@ -116,13 +146,13 @@ proj_expand <- function(locations,prj,expand){ # grepl("\\bGEOGRAPHICCRS\\b", sf::st_crs(prj)) | # grepl("\\blonglat\\b", sf::st_crs(prj)) | # grepl("\\blatlong\\b", sf::st_crs(prj))) - + if(is.null(nrow(locations))){ - nfeature <- length(locations) + nfeature <- length(locations) } else { nfeature <- nrow(locations) } - + if(any(sf::st_bbox(locations)[c("ymin","ymax")] == 0) & lll & is.null(expand)){ # Edge case for lat exactly at the equator - was returning NA expand <- 0.01 @@ -133,14 +163,14 @@ proj_expand <- function(locations,prj,expand){ # Edge case for single point and projected # set to 1000 meters unit <- sf::st_crs(sf::st_as_sf(locations), parameters = TRUE)$ud_unit - expand <- units::set_units(units::set_units(1000, "m"), unit, + expand <- units::set_units(units::set_units(1000, "m"), unit, mode = "standard") expand <- as.numeric(expand) } - + if(!is.null(expand)){ - + bbx <- sf::st_bbox(locations) + c(-expand, -expand, expand, expand) } else { bbx <- sf::st_bbox(locations) @@ -151,10 +181,10 @@ proj_expand <- function(locations,prj,expand){ bbx_coord_check <- as.numeric(bbx) if(any(!bbx_coord_check >= -180 & bbx_coord_check <= 360)){ stop("The elevatr 
package requires longitude in a range from -180 to 180.") - } + } bbx - + #sf expand - save for later #loc_sf <- sf::st_as_sf(locations) #loc_bbx <- sf::st_bbox(loc_sf) @@ -196,8 +226,8 @@ bbox_to_sf <- function(bbox, prj = 4326) { #' @param z zoom level if source is aws #' @keywords internal estimate_raster_size <- function(locations, prj, src, z = NULL){ - - locations <- bbox_to_sf(sf::st_bbox(locations), + + locations <- bbox_to_sf(sf::st_bbox(locations), prj = prj) locations <- sf::st_transform(locations, crs = 4326) @@ -207,14 +237,14 @@ estimate_raster_size <- function(locations, prj, src, z = NULL){ # Convert ground res to dd # zoom level 0 = 156543 meters 156543/111319.9 # old resolution (no idea how I calculated these...) - # c(0.54905236, 0.27452618, 0.15455633, 0.07145545, 0.03719130, 0.01901903, - # 0.00962056, 0.00483847, 0.00241219, 0.00120434, 0.00060173, 0.00030075, + # c(0.54905236, 0.27452618, 0.15455633, 0.07145545, 0.03719130, 0.01901903, + # 0.00962056, 0.00483847, 0.00241219, 0.00120434, 0.00060173, 0.00030075, # 0.00015035, 0.00007517, 0.00003758) - m_at_equator <- c(156543.0, 78271.5, 39135.8, 19567.9, 9783.9, 4892.0, 2446.0, - 1223.0, 611.5, 305.7, 152.9, 76.4, 38.2, 19.1, 9.6, 4.8, + m_at_equator <- c(156543.0, 78271.5, 39135.8, 19567.9, 9783.9, 4892.0, 2446.0, + 1223.0, 611.5, 305.7, 152.9, 76.4, 38.2, 19.1, 9.6, 4.8, 2.4) z_res <- data.frame(z = 0:16, res_dd = m_at_equator/111319.9) - + bits <- switch(src, aws = 32, gl3 = 32, @@ -228,20 +258,20 @@ estimate_raster_size <- function(locations, prj, src, z = NULL){ gl3 = 0.0008333, gl1 = 0.0002778, alos = 0.0002778, - srtm15plus = 0.004165) + srtm15plus = 0.004165) } num_rows <- (sf::st_bbox(locations)$xmax - sf::st_bbox(locations)$xmin)/res num_cols <- (sf::st_bbox(locations)$ymax - sf::st_bbox(locations)$ymin)/res - + num_megabytes <- (num_rows * num_cols * bits)/8388608 num_megabytes } #' OpenTopo Key -#' +#' #' The OpenTopography API now requires an API Key. 
This function will grab your #' key from an .Renviron file -#' +#' #' @keywords internal get_opentopo_key <- function(){ if(Sys.getenv("OPENTOPO_KEY")==""){ @@ -249,4 +279,4 @@ get_opentopo_key <- function(){ Please use elevatr::set_opentopo_key().") } Sys.getenv("OPENTOPO_KEY") -} \ No newline at end of file +} From 513ab7c903c28c7290df6c2213e89b7b39e808b9 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:37:36 -0400 Subject: [PATCH 06/23] Add relocate_sf_col_end helper --- R/internal.R | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/R/internal.R b/R/internal.R index 099c34d..cf6d7af 100644 --- a/R/internal.R +++ b/R/internal.R @@ -280,3 +280,10 @@ get_opentopo_key <- function(){ } Sys.getenv("OPENTOPO_KEY") } + +#' Make sf column the last column in a sf data frame +#' @keywords internal +relocate_sf_col_end <- function(x) { + cols <- c(setdiff(names(x), attr(x, "sf_column")), attr(x, "sf_column")) + x[,cols, drop = FALSE] +} From d9f65f4e82b719de73faf11f65e75c6e194c19ec Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:38:19 -0400 Subject: [PATCH 07/23] Add st_point_distances helper --- R/internal.R | 61 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/R/internal.R b/R/internal.R index cf6d7af..aada37b 100644 --- a/R/internal.R +++ b/R/internal.R @@ -287,3 +287,64 @@ relocate_sf_col_end <- function(x) { cols <- c(setdiff(names(x), attr(x, "sf_column")), attr(x, "sf_column")) x[,cols, drop = FALSE] } + +#' Get distances between successive pairs of points +#' @inheritDotParams sf::st_distance -x -y +#' @keywords internal +st_point_distances <- function(x, + cumulative = TRUE, + units = NULL, + prj = sf::st_crs(x), + ...) { + stopifnot( + inherits(x, c("sfc", "sf")), + all(sf::st_is(x, "POINT")) + ) + + points <- x + + if (inherits(x, "sf")) { + points <- sf::st_geometry(x) + } + + point_list <- lapply( + points, + \(x) { + sf::st_sfc(x, crs = prj) + } + ) + + dist_points <- purrr::reduce( + seq_along(point_list), + \(x, y) { + if (y == length(point_list)) { + return(x) + } + + c( + x, + sf::st_distance( + x = point_list[[y]], + y = point_list[[y + 1]], + ... 
+ ) + ) + }, + .init = 0 + ) + + if (cumulative) { + dist_points <- cumsum(dist_points) + } + + # TODO: Add handling for non character units values + if (!is.null(units)) { + dist_points <- units::set_units( + dist_points, + value = units, + mode = "standard" + ) + } + + dist_points +} From 05acff4a4c93f8507a2c098621838d18338101e1 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:41:50 -0400 Subject: [PATCH 08/23] Refactor proj_expand - Simplify internal logic - Fix leftover use of ll_geo for crs instead of prj --- R/internal.R | 43 +++++++++++++++++-------------------------- 1 file changed, 17 insertions(+), 26 deletions(-) diff --git a/R/internal.R b/R/internal.R index aada37b..c2094fc 100644 --- a/R/internal.R +++ b/R/internal.R @@ -137,48 +137,39 @@ loc_check <- function(locations, #' function to project bounding box and if needed expand it #' @keywords internal -proj_expand <- function(locations,prj,expand){ - +proj_expand <- function(locations, prj, expand = NULL) { lll <- sf::st_is_longlat(prj) - #any(grepl("\\bGEOGCRS\\b",sf::st_crs(prj)) | - # grepl("\\bGEODCRS\\b", sf::st_crs(prj)) | - # grepl("\\bGEODETICCRS\\b", sf::st_crs(prj)) | - # grepl("\\bGEOGRAPHICCRS\\b", sf::st_crs(prj)) | - # grepl("\\blonglat\\b", sf::st_crs(prj)) | - # grepl("\\blatlong\\b", sf::st_crs(prj))) - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } + nfeature <- loc_length(locations) + single_pt <- nfeature == 1 && is.null(expand) + bbx <- sf::st_bbox(locations) - if(any(sf::st_bbox(locations)[c("ymin","ymax")] == 0) & lll & is.null(expand)){ + if (any(loc_bbox[c("ymin","ymax")] == 0) && lll && is.null(expand)) { # Edge case for lat exactly at the equator - was returning NA expand <- 0.01 - } else if(nfeature == 1 & lll & is.null(expand)){ + } else if (single_pt && lll) { # Edge case for single point and lat long expand <- 0.01 - } else if(nfeature == 1 & is.null(expand)){ + } else if (single_pt) { # Edge case for single point and projected # set to 1000 meters unit <- sf::st_crs(sf::st_as_sf(locations), parameters = TRUE)$ud_unit - expand <- units::set_units(units::set_units(1000, "m"), unit, - mode = "standard") + + expand <- units::set_units( + units::set_units(1000, "m"), + unit, + mode = "standard" + ) expand <- as.numeric(expand) } - if(!is.null(expand)){ - - bbx <- sf::st_bbox(locations) + c(-expand, -expand, expand, expand) - } else { - bbx <- sf::st_bbox(locations) + bbx <- bbx + c(-expand, -expand, expand, expand) } - bbx <- bbox_to_sf(bbx, prj = prj) - bbx <- sf::st_bbox(sf::st_transform(bbx, crs = ll_geo)) + bbx_sf <- bbox_to_sf(bbx, prj = prj) + bbx <- sf::st_bbox(sf::st_transform(bbx_sf, crs = prj)) bbx_coord_check <- as.numeric(bbx) + if(any(!bbx_coord_check >= -180 & bbx_coord_check <= 360)){ stop("The elevatr package requires longitude in a range from -180 to 180.") } From 285732ea7f7cfd9105d5fe2e3e43a7f961da38d5 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:47:43 -0400 Subject: [PATCH 09/23] Add get_elev_profile function Function wraps `get_elev_point()` to add support for returning elevation along a profile line --- NAMESPACE | 1 + R/get_elev_profile.R | 95 +++++++++++++++++++++++++++++++++++++++++ man/get_elev_profile.Rd | 86 +++++++++++++++++++++++++++++++++++++ 3 files changed, 182 insertions(+) create mode 100644 R/get_elev_profile.R create mode 100644 man/get_elev_profile.Rd diff --git a/NAMESPACE b/NAMESPACE index d6b8469..eb0314c 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -3,6 +3,7 @@ 
export(get_aws_points) export(get_aws_terrain) export(get_elev_point) +export(get_elev_profile) export(get_elev_raster) export(get_epqs) export(get_opentopo) diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R new file mode 100644 index 0000000..a3e0659 --- /dev/null +++ b/R/get_elev_profile.R @@ -0,0 +1,95 @@ +#' Get elevation along a profile +#' +#' @inheritParams loc_linestring_to_point +#' @param include Option of columns to include: one of "default", "dist", or +#' "cumdist". Default value returns same columns as [get_elev_point()]. If +#' `include = "dist"`, the returned locations include the distance between +#' each successive pair of points. If `include = "cumdist"`, the distances are +#' provided as a cumulative sum. +#' @inheritParams get_elev_point +#' @examples +#' +#' nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> +#' st_transform(3857) +#' +#' nc_line <- suppressWarnings( +#' st_cast( +#' st_union( +#' st_centroid(nc[1, ]), +#' st_centroid(nc[2, ]) +#' ), +#' to = "LINESTRING" +#' ) +#' ) +#' +#' elev_point <- get_elev_profile( +#' nc_line, +#' units = "ft", +#' dist = TRUE, +#' cumulative = TRUE, +#' n = 10 +#' ) +#' +#' elev_point +#' +#' @export +get_elev_profile <- function(locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL, + units = NULL, + include = c("default", "dist", "cumdist"), + ..., + prj = NULL, + overwrite = FALSE, + coords = c("x", "y"), + dist_col = "distance", + elev_col = "elevation", + elev_units_col = "elev_units") { + locations <- loc_check(locations, prj = prj, elev_col = elev_col, coords = coords) + + if (sf::st_is(locations, "LINESTRING")) { + location_coords <- loc_linestring_to_point( + locations, + n = n, + density = density, + type = type, + sample = sample + ) + } + + stopifnot( + "`locations` must use POINT or LINESTRING geometry" = sf::st_is(locations, "POINT") + ) + + if (inherits(locations, c("sfc", "sf")) && is.null(prj)) { + prj <- sf::st_crs(locations) + } + + elev_point <- get_elev_point( + locations, + prj = prj, + units = units, + elev_col = elev_col, + elev_units_col = elev_units_col + ) + + include <- match.arg(include) + + if (include != "default") { + cumulative <- include == "cumdist" + + dist_values <- st_point_distances( + elev_point, + cumulative = cumulative, + units = units, + prj = prj + ) + + locations[[dist_col]] <- dist_values + locations <- relocate_sf_col_end(locations) + } + + locations +} diff --git a/man/get_elev_profile.Rd b/man/get_elev_profile.Rd new file mode 100644 index 0000000..3a08c8a --- /dev/null +++ b/man/get_elev_profile.Rd @@ -0,0 +1,86 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_elev_profile.R +\name{get_elev_profile} +\alias{get_elev_profile} +\title{Get elevation along a profile} +\usage{ +get_elev_profile( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL, + units = NULL, + include = c("default", "dist", "cumdist"), + ..., + prj = NULL, + overwrite = FALSE, + coords = c("x", "y"), + dist_col = "distance", + elev_col = "elevation", + elev_units_col = "elev_units" +) +} +\arguments{ +\item{locations}{A sf, sfc, or sfg object with LINESTRING geometry.} + +\item{n}{integer; number of points to choose per geometry; if missing, n will be computed as \code{round(density * st_length(geom))}.} + +\item{density}{numeric; density (points per distance unit) of the sampling, possibly a vector of length equal to the number of features (otherwise recycled); \code{density} may be of class 
\code{units}.} + +\item{type}{character; indicate the sampling type, either "regular" or "random"} + +\item{sample}{numeric; a vector of numbers between 0 and 1 indicating the points to sample - if defined sample overrules n, density and type.} + +\item{include}{Option of columns to include: one of "default", "dist", or +"cumdist". Default value returns same columns as \code{\link[=get_elev_point]{get_elev_point()}}. If +\code{include = "dist"}, the returned locations include the distance between +each successive pair of points. If \code{include = "cumdist"}, the distances are +provided as a cumulative sum.} + +\item{...}{Additional arguments passed to get_epqs or get_aws_points. When +using "aws" as the source, pay attention to the \code{z} argument. A +default of 5 is used, but this uses a raster with a large ~4-5 km +pixel. Additionally, the source data changes as zoom levels +increase. +Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +for details.} + +\item{prj}{A valid input to \code{\link{st_crs}}. This +argument is required for a \code{data.frame} of locations and optional +for \code{sf} locations.} + +\item{overwrite}{A logical indicating that existing \code{elevation} and +\code{elev_units} columns should be overwritten. Default is +FALSE and \code{get_elev_point} will error if these columns +already exist.} +} +\description{ +Get elevation along a profile +} +\examples{ + +nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> + st_transform(3857) + +nc_line <- suppressWarnings( + st_cast( + st_union( + st_centroid(nc[1, ]), + st_centroid(nc[2, ]) + ), + to = "LINESTRING" + ) +) + +elev_point <- get_elev_profile( + nc_line, + units = "ft", + dist = TRUE, + cumulative = TRUE, + n = 10 +) + +elev_point + +} From c7a9a66a6688e1c01494703d8c33bd015c1fea8a Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:47:52 -0400 Subject: [PATCH 10/23] Update docs for helper functions --- man/loc_linestring_to_point.Rd | 30 ++++++++++++++++++++++++++++++ man/proj_expand.Rd | 2 +- man/relocate_sf_col_end.Rd | 12 ++++++++++++ man/st_point_distances.Rd | 29 +++++++++++++++++++++++++++++ 4 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 man/loc_linestring_to_point.Rd create mode 100644 man/relocate_sf_col_end.Rd create mode 100644 man/st_point_distances.Rd diff --git a/man/loc_linestring_to_point.Rd b/man/loc_linestring_to_point.Rd new file mode 100644 index 0000000..d0ae27b --- /dev/null +++ b/man/loc_linestring_to_point.Rd @@ -0,0 +1,30 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{loc_linestring_to_point} +\alias{loc_linestring_to_point} +\title{function sf, sfc, or sfg with LINESTRING geometry to POINT geometry} +\usage{ +loc_linestring_to_point( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL +) +} +\arguments{ +\item{locations}{A sf, sfc, or sfg object with LINESTRING geometry.} + +\item{n}{integer; number of points to choose per geometry; if missing, n will be computed as \code{round(density * st_length(geom))}.} + +\item{density}{numeric; density (points per distance unit) of the sampling, possibly a vector of length equal to the number of features (otherwise recycled); \code{density} may be of class \code{units}.} + +\item{type}{character; indicate the sampling type, either "regular" or "random"} + +\item{sample}{numeric; a vector of numbers between 0 and 1 indicating the points to sample - if defined sample 
overrules n, density and type.} +} +\description{ +Optionally use \code{\link[sf:st_line_sample]{sf::st_line_sample()}} when n, density, or sample are +supplied. +} +\keyword{internal} diff --git a/man/proj_expand.Rd b/man/proj_expand.Rd index c7fb804..9b27f4b 100644 --- a/man/proj_expand.Rd +++ b/man/proj_expand.Rd @@ -4,7 +4,7 @@ \alias{proj_expand} \title{function to project bounding box and if needed expand it} \usage{ -proj_expand(locations, prj, expand) +proj_expand(locations, prj, expand = NULL) } \description{ function to project bounding box and if needed expand it diff --git a/man/relocate_sf_col_end.Rd b/man/relocate_sf_col_end.Rd new file mode 100644 index 0000000..060e75f --- /dev/null +++ b/man/relocate_sf_col_end.Rd @@ -0,0 +1,12 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{relocate_sf_col_end} +\alias{relocate_sf_col_end} +\title{Make sf column the last column in a sf data frame} +\usage{ +relocate_sf_col_end(x) +} +\description{ +Make sf column the last column in a sf data frame +} +\keyword{internal} diff --git a/man/st_point_distances.Rd b/man/st_point_distances.Rd new file mode 100644 index 0000000..a581e97 --- /dev/null +++ b/man/st_point_distances.Rd @@ -0,0 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{st_point_distances} +\alias{st_point_distances} +\title{Get distances between successive pairs of points} +\usage{ +st_point_distances( + x, + cumulative = TRUE, + units = NULL, + prj = sf::st_crs(x), + ... +) +} +\arguments{ +\item{...}{ + Arguments passed on to \code{\link[sf:geos_measures]{sf::st_distance}} + \describe{ + \item{\code{dist_fun}}{deprecated} + \item{\code{by_element}}{logical; if \code{TRUE}, return a vector with distance between the first elements of \code{x} and \code{y}, the second, etc; an error is raised if \code{x} and \code{y} are not the same length. If \code{FALSE}, return the dense matrix with all pairwise distances.} + \item{\code{which}}{character; for Cartesian coordinates only: one of \code{Euclidean}, \code{Hausdorff} or \code{Frechet}; for geodetic coordinates, great circle distances are computed; see details} + \item{\code{par}}{for \code{which} equal to \code{Hausdorff} or \code{Frechet}, optionally use a value between 0 and 1 to densify the geometry} + \item{\code{tolerance}}{ignored if \code{st_is_longlat(x)} is \code{FALSE}; otherwise, if set to a positive value, the first distance smaller than \code{tolerance} will be returned, and true distance may be smaller; this may speed up computation. In meters, or a \code{units} object convertible to meters.} + }} +} +\description{ +Get distances between successive pairs of points +} +\keyword{internal} From 09f76a0f3854454929614c862105fe9bac749a49 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:59:00 -0400 Subject: [PATCH 11/23] Fix typo in proj_expand Also fill missing argument definition for loc_check --- R/internal.R | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/R/internal.R b/R/internal.R index c2094fc..4229cde 100644 --- a/R/internal.R +++ b/R/internal.R @@ -78,6 +78,11 @@ loc_length <- function(locations) { #' function to check and prepare input locations #' #' All input types convert to a sf data frame. +#' +#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} +#' object or a \code{terra} object is provided as the \code{locations}, +#' the prj is optional and will be taken from \code{locations}. 
This +#' argument is required for a \code{data.frame} of locations. #' @inheritParams sf::st_as_sf #' @param elev_col Elevation column name. #' @keywords internal @@ -143,7 +148,7 @@ proj_expand <- function(locations, prj, expand = NULL) { single_pt <- nfeature == 1 && is.null(expand) bbx <- sf::st_bbox(locations) - if (any(loc_bbox[c("ymin","ymax")] == 0) && lll && is.null(expand)) { + if (any(bbx[c("ymin","ymax")] == 0) && lll && is.null(expand)) { # Edge case for lat exactly at the equator - was returning NA expand <- 0.01 } else if (single_pt && lll) { From f160a29f792c90f53c97d884bba8efcdc931ec67 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 00:59:27 -0400 Subject: [PATCH 12/23] Fix test for loc_check --- tests/testthat/test-internal.R | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/testthat/test-internal.R b/tests/testthat/test-internal.R index a8691b8..074d6b5 100644 --- a/tests/testthat/test-internal.R +++ b/tests/testthat/test-internal.R @@ -11,8 +11,8 @@ ll_prj <- 4326 aea_prj <- 5072 sf_sm <- st_as_sf(pt_df, coords = c("x", "y"), crs = ll_prj) -sf_sm_prj <- st_transform(sf_sm, crs = aea_prj) -bad_sf <- st_as_sf(data.frame(x = 1000, y = 1000), coords = c("x", "y"), +sf_sm_prj <- st_transform(sf_sm, crs = aea_prj) +bad_sf <- st_as_sf(data.frame(x = 1000, y = 1000), coords = c("x", "y"), crs = ll_prj) rast <- terra::rasterize(st_coordinates(sf_sm),terra::rast(sf_sm)) sf_sm_na <- sf_sm @@ -26,24 +26,24 @@ mts <- rbind(mt_wash,mt_mans) mts$name <- c("Mount Washington", "Mount Mansfield") test_that("data frame with more extra columns work", { - + mts_with_names_and_elevation <- get_elev_point(mts, ll_prj) expect_true("name" %in% names(mts_with_names_and_elevation)) }) test_that("proj_expand works",{ - - mans_sf <- st_as_sf(data.frame(x = -72.8145, y = 44.5438), + + mans_sf <- st_as_sf(data.frame(x = -72.8145, y = 44.5438), coords = c("x","y"), crs = ll_prj) mans <- get_elev_raster(locations = mans_sf, z = 6) mans_exp <- get_elev_raster(locations = mans_sf, z = 6, expand = 2) - + rast_elev <- get_elev_raster(locations = rast, z = 5) rast_elev_exp <- get_elev_raster(locations = rast, z = 5, exp = 5) expect_gt(ncell(mans_exp),ncell(mans)) expect_gt(ncell(rast_elev_exp), ncell(rast_elev)) - + origin_sf <- st_as_sf(data.frame(x = 0, y = 0), coords = c("x", "y"), crs = ll_prj) origins <- get_elev_raster(locations = origin_sf, z = 6) @@ -58,14 +58,14 @@ test_that("loc_check errors correctly", { expect_error(get_elev_point(locations = rast_na), "Please supply a valid crs via locations or prj.") expect_error(get_elev_point(locations = sf_sm_na), - "Please supply an sf object with a valid crs.") + "Please supply an sf or sfc object with a valid crs.") }) test_that("Z of 1 or 0 works in get_tilexy",{ sf_sm_1 <- get_elev_raster(sf_sm, z = 1, clip = "bbox") sf_sm_0 <- get_elev_raster(sf_sm, z = 0, clip = "bbox") - + expect_gt(max(res(sf_sm_1)), 0.27) expect_gt(max(res(sf_sm_0)), 0.54) }) From b89a257756ae378e3617b177c231495bc598b8bb Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:01:06 -0400 Subject: [PATCH 13/23] Fix example for get_elev_profile Fill missing argument definitions for get_elev_point --- R/get_elev_point.R | 8 +++++++- R/get_elev_profile.R | 8 ++++---- man/get_elev_point.Rd | 6 ++++++ man/get_elev_profile.Rd | 6 ++++++ man/loc_check.Rd | 5 +++++ 5 files changed, 28 insertions(+), 5 deletions(-) diff --git a/R/get_elev_point.R b/R/get_elev_point.R index 009f05b..c7f48b0 100644 --- a/R/get_elev_point.R +++ 
b/R/get_elev_point.R @@ -34,6 +34,11 @@ #' increase. #' Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} #' for details. +#' @param units Default: `c("meters", "feet")`. Set to "meters" by default. Any +#' character string that is a valid distance unit supported by +#' `units::set_units()`. +#' @param elev_units_col Elevation units column. +#' @inheritParams loc_check #' @return Function returns an \code{sf} object in the projection specified by #' the \code{prj} argument. #' @export @@ -99,7 +104,8 @@ get_elev_point <- function(locations, locations <- loc_check( locations, prj = prj, - coords = coords + coords = coords, + elev_col = elev_col ) if (is.null(prj)) { diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R index a3e0659..51cfc1f 100644 --- a/R/get_elev_profile.R +++ b/R/get_elev_profile.R @@ -9,12 +9,12 @@ #' @inheritParams get_elev_point #' @examples #' -#' nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> -#' st_transform(3857) +#' nc <- sf::st_read(system.file("shape/nc.shp", package = "sf")) |> +#' sf::st_transform(3857) #' #' nc_line <- suppressWarnings( -#' st_cast( -#' st_union( +#' sf::st_cast( +#' sf::st_union( #' st_centroid(nc[1, ]), #' st_centroid(nc[2, ]) #' ), diff --git a/man/get_elev_point.Rd b/man/get_elev_point.Rd index 8a12f13..ce79c09 100644 --- a/man/get_elev_point.Rd +++ b/man/get_elev_point.Rd @@ -47,6 +47,12 @@ pixel. Additionally, the source data changes as zoom levels increase. Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} for details.} + +\item{units}{Default: \code{c("meters", "feet")}. Set to "meters" by default. Any +character string that is a valid distance unit supported by +\code{units::set_units()}.} + +\item{elev_units_col}{Elevation units column.} } \value{ Function returns an \code{sf} object in the projection specified by diff --git a/man/get_elev_profile.Rd b/man/get_elev_profile.Rd index 3a08c8a..570a35e 100644 --- a/man/get_elev_profile.Rd +++ b/man/get_elev_profile.Rd @@ -32,6 +32,10 @@ get_elev_profile( \item{sample}{numeric; a vector of numbers between 0 and 1 indicating the points to sample - if defined sample overrules n, density and type.} +\item{units}{Default: \code{c("meters", "feet")}. Set to "meters" by default. Any +character string that is a valid distance unit supported by +\code{units::set_units()}.} + \item{include}{Option of columns to include: one of "default", "dist", or "cumdist". Default value returns same columns as \code{\link[=get_elev_point]{get_elev_point()}}. If \code{include = "dist"}, the returned locations include the distance between @@ -54,6 +58,8 @@ for \code{sf} locations.} \code{elev_units} columns should be overwritten. Default is FALSE and \code{get_elev_point} will error if these columns already exist.} + +\item{elev_units_col}{Elevation units column.} } \description{ Get elevation along a profile diff --git a/man/loc_check.Rd b/man/loc_check.Rd index e0e1c00..899d4bb 100644 --- a/man/loc_check.Rd +++ b/man/loc_check.Rd @@ -7,6 +7,11 @@ loc_check(locations, prj = NULL, coords = c("x", "y"), elev_col = "elevation") } \arguments{ +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. 
This +argument is required for a \code{data.frame} of locations.} + \item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} \item{elev_col}{Elevation column name.} From ac1b78cc28031a8888dcabcd05b2a40ae2f09657 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:01:37 -0400 Subject: [PATCH 14/23] Fill missing arguments for get_elev_raster --- R/get_elev_raster.R | 15 +++------------ man/get_elev_raster.Rd | 2 ++ 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/R/get_elev_raster.R b/R/get_elev_raster.R index 378fcc6..678a04f 100644 --- a/R/get_elev_raster.R +++ b/R/get_elev_raster.R @@ -14,10 +14,7 @@ #' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. #' The z is not required for the OpenTopography data sources. -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. +#' @inheritParams loc_check #' @param src A character indicating which API to use. Currently supports "aws" #' and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global #' datasets. "aws" is the default. @@ -179,10 +176,7 @@ get_elev_raster <- function(locations, z, prj = NULL, #' of the resultant raster is determined by the zoom and latitude. For #' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. +#' @inheritParams loc_check #' @param expand A numeric value of a distance, in map units, used to expand the #' bounding box that is used to fetch the terrain tiles. This can #' be used for features that fall close to the edge of a tile and @@ -359,10 +353,7 @@ merge_rasters <- function(raster_list, target_prj, method = "bilinear", #' #' @param locations Either a \code{data.frame} of x (long) and y (lat), an #' \code{sp}, an \code{sf}, or \code{raster} object as input. -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. +#' @inheritParams loc_check #' @param expand A numeric value of a distance, in map units, used to expand the #' bounding box that is used to fetch the SRTM data. #' @param tmp_dir The location to store downloaded raster files. Defaults to a diff --git a/man/get_elev_raster.Rd b/man/get_elev_raster.Rd index 0614176..29e2570 100644 --- a/man/get_elev_raster.Rd +++ b/man/get_elev_raster.Rd @@ -72,6 +72,8 @@ temporary location. Alternatively, the user may supply an existing path for these raster files. New folders are not created by \code{get_elev_raster}.} +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + \item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, \code{config}. 
See \code{\link{get_aws_terrain}} for more details.} From 41867be5059d067694721cfba64060f83e719970 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:03:32 -0400 Subject: [PATCH 15/23] Fill missing dist_col definition --- R/get_elev_profile.R | 6 ++++-- man/get_elev_point.Rd | 4 ++++ man/get_elev_profile.Rd | 19 +++++++++++++------ 3 files changed, 21 insertions(+), 8 deletions(-) diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R index 51cfc1f..5f612b7 100644 --- a/R/get_elev_profile.R +++ b/R/get_elev_profile.R @@ -7,6 +7,8 @@ #' each successive pair of points. If `include = "cumdist"`, the distances are #' provided as a cumulative sum. #' @inheritParams get_elev_point +#' @param dist_col Column name to use for optional distance column. Only used if +#' `include` is set to `"dist"` or `"cumdist"`. #' @examples #' #' nc <- sf::st_read(system.file("shape/nc.shp", package = "sf")) |> @@ -44,9 +46,9 @@ get_elev_profile <- function(locations, prj = NULL, overwrite = FALSE, coords = c("x", "y"), - dist_col = "distance", elev_col = "elevation", - elev_units_col = "elev_units") { + elev_units_col = "elev_units", + dist_col = "distance") { locations <- loc_check(locations, prj = prj, elev_col = elev_col, coords = coords) if (sf::st_is(locations, "LINESTRING")) { diff --git a/man/get_elev_point.Rd b/man/get_elev_point.Rd index ce79c09..300e91e 100644 --- a/man/get_elev_point.Rd +++ b/man/get_elev_point.Rd @@ -35,6 +35,8 @@ cases provided the points are in a similar geographic area. The "aws" source downloads a DEM using \code{get_elev_raster} and then extracts the elevation for each point.} +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + \item{overwrite}{A logical indicating that existing \code{elevation} and \code{elev_units} columns should be overwritten. Default is FALSE and \code{get_elev_point} will error if these columns @@ -52,6 +54,8 @@ for details.} character string that is a valid distance unit supported by \code{units::set_units()}.} +\item{elev_col}{Elevation column name.} + \item{elev_units_col}{Elevation units column.} } \value{ diff --git a/man/get_elev_profile.Rd b/man/get_elev_profile.Rd index 570a35e..b110869 100644 --- a/man/get_elev_profile.Rd +++ b/man/get_elev_profile.Rd @@ -16,9 +16,9 @@ get_elev_profile( prj = NULL, overwrite = FALSE, coords = c("x", "y"), - dist_col = "distance", elev_col = "elevation", - elev_units_col = "elev_units" + elev_units_col = "elev_units", + dist_col = "distance" ) } \arguments{ @@ -59,19 +59,26 @@ for \code{sf} locations.} FALSE and \code{get_elev_point} will error if these columns already exist.} +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + +\item{elev_col}{Elevation column name.} + \item{elev_units_col}{Elevation units column.} + +\item{dist_col}{Column name to use for optional distance column. Only used if +\code{include} is set to \code{"dist"} or \code{"cumdist"}.} } \description{ Get elevation along a profile } \examples{ -nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> - st_transform(3857) +nc <- sf::st_read(system.file("shape/nc.shp", package = "sf")) |> + sf::st_transform(3857) nc_line <- suppressWarnings( - st_cast( - st_union( + sf::st_cast( + sf::st_union( st_centroid(nc[1, ]), st_centroid(nc[2, ]) ), From 5418936b834e1e4306d1afeed59550af2efbf2c4 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:11:06 -0400 Subject: [PATCH 16/23] Fix example for get_elev_profile Second try! 
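
For reference, the same profile-style lookup can be sketched with
sf::st_line_sample() plus get_elev_point(). The coordinates, CRS, and
zoom level below are illustrative assumptions, not values taken from
this patch:

    library(sf)
    library(elevatr)

    # Assumed two-vertex line in EPSG:3857; st_line_sample() needs
    # projected coordinates
    line <- st_sfc(
      st_linestring(rbind(c(-7910000, 5210000), c(-7905000, 5215000))),
      crs = 3857
    )

    # Sample 10 regularly spaced points along the line and cast to POINT
    pts <- st_cast(st_line_sample(line, n = 10), "POINT")

    # Look up elevation for the sampled points from the AWS terrain tiles
    get_elev_point(st_sf(geometry = pts), src = "aws", z = 10)

st_line_sample() requires projected coordinates, which is why the
documented example transforms the input to EPSG:3857 first.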
--- R/get_elev_profile.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R index 5f612b7..9a2a682 100644 --- a/R/get_elev_profile.R +++ b/R/get_elev_profile.R @@ -17,8 +17,8 @@ #' nc_line <- suppressWarnings( #' sf::st_cast( #' sf::st_union( -#' st_centroid(nc[1, ]), -#' st_centroid(nc[2, ]) +#' sf::st_centroid(nc[1, ]), +#' sf::st_centroid(nc[2, ]) #' ), #' to = "LINESTRING" #' ) From e2da1f12d06f3fcf1d19b579ac30b0c6379822fa Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:21:54 -0400 Subject: [PATCH 17/23] Fix #91 Also add error message to avoid confusion like #96 and #94 --- R/internal.R | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/R/internal.R b/R/internal.R index 4229cde..f86621d 100644 --- a/R/internal.R +++ b/R/internal.R @@ -95,6 +95,10 @@ loc_check <- function(locations, stop("Please supply a valid crs via locations or prj.") } + stopifnot( + "`locations` must contain column names matching `coords`" = all(coords %in% names(locations)) + ) + locations <- sf::st_as_sf(x = locations, coords = coords, crs = prj) } else if (inherits(locations, c("sf", "sfc", "sfg"))) { sf_crs <- sf::st_crs(locations) @@ -112,11 +116,15 @@ loc_check <- function(locations, } else if (any(class(locations) %in% c("SpatRaster", "SpatVector"))) { sf_crs <- sf::st_crs(locations) - locations <- sf::st_as_sf( - terra::as.points(locations, values = FALSE), - coords = terra::crds(locations, df = TRUE), - crs = sf_crs - ) + coords <- terra::crds(locations, df = TRUE) + + if (inherits(locations, "SpatVector")) { + locations <- terra::as.points(locations) + } else { + locations <- terra::as.points(locations, values = FALSE) + } + + locations <- sf::st_as_sf(locations, coords = coords, crs = sf_crs) if ((is.null(sf_crs) || is.na(sf_crs)) && is.null(prj)) { stop("Please supply a valid crs via locations or prj.") From ae9a29be1646fcc1e4cf8ed8e5885f44e4b4c4ae Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:29:11 -0400 Subject: [PATCH 18/23] Expand get_elev_profile docs Also wrap example in don't run tag --- R/get_elev_profile.R | 24 +++++++++++++++++------- man/get_elev_profile.Rd | 20 ++++++++++++++------ 2 files changed, 31 insertions(+), 13 deletions(-) diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R index 9a2a682..bff2bc7 100644 --- a/R/get_elev_profile.R +++ b/R/get_elev_profile.R @@ -1,4 +1,10 @@ -#' Get elevation along a profile +#' Get Point Elevation along a Profile Line +#' +#' [get_elev_profile()] allows users to provide LINESTRING inputs to +#' [sf::st_line_sample()] or to cast LINESTRING to POINT before calling +#' [get_elev_point()] to get the point elevations. The function allows users to +#' get elevation along a profile line and, optionally, include a distance or +#' cumulative distance column in the output sf data frame. #' #' @inheritParams loc_linestring_to_point #' @param include Option of columns to include: one of "default", "dist", or @@ -10,15 +16,18 @@ #' @param dist_col Column name to use for optional distance column. Only used if #' `include` is set to `"dist"` or `"cumdist"`. 
#' @examples +#' \dontrun{ +#' library(sf) +#' library(elevatr) #' -#' nc <- sf::st_read(system.file("shape/nc.shp", package = "sf")) |> -#' sf::st_transform(3857) +#' nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> +#' st_transform(3857) #' #' nc_line <- suppressWarnings( -#' sf::st_cast( -#' sf::st_union( -#' sf::st_centroid(nc[1, ]), -#' sf::st_centroid(nc[2, ]) +#' st_cast( +#' st_union( +#' st_centroid(nc[1, ]), +#' st_centroid(nc[2, ]) #' ), #' to = "LINESTRING" #' ) @@ -34,6 +43,7 @@ #' #' elev_point #' +#' } #' @export get_elev_profile <- function(locations, n = NULL, diff --git a/man/get_elev_profile.Rd b/man/get_elev_profile.Rd index b110869..f4aba7d 100644 --- a/man/get_elev_profile.Rd +++ b/man/get_elev_profile.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/get_elev_profile.R \name{get_elev_profile} \alias{get_elev_profile} -\title{Get elevation along a profile} +\title{Get Point Elevation along a Profile Line} \usage{ get_elev_profile( locations, @@ -69,16 +69,23 @@ already exist.} \code{include} is set to \code{"dist"} or \code{"cumdist"}.} } \description{ -Get elevation along a profile +\code{\link[=get_elev_profile]{get_elev_profile()}} allows users to provide LINESTRING inputs to +\code{\link[sf:st_line_sample]{sf::st_line_sample()}} or to cast LINESTRING to POINT before calling +\code{\link[=get_elev_point]{get_elev_point()}} to get the point elevations. The function allows users to +get elevation along a profile line and, optionally, include a distance or +cumulative distance column in the output sf data frame. } \examples{ +\dontrun{ +library(sf) +library(elevatr) -nc <- sf::st_read(system.file("shape/nc.shp", package = "sf")) |> - sf::st_transform(3857) +nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> + st_transform(3857) nc_line <- suppressWarnings( - sf::st_cast( - sf::st_union( + st_cast( + st_union( st_centroid(nc[1, ]), st_centroid(nc[2, ]) ), @@ -97,3 +104,4 @@ elev_point <- get_elev_profile( elev_point } +} From bee6ae0dc70b6851b27887cf33d970b62332e58a Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:38:50 -0400 Subject: [PATCH 19/23] Add Eli Pousson to contributors Tidy DESCRIPTION --- DESCRIPTION | 82 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 46 insertions(+), 36 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index 963a066..811d46e 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,51 +1,61 @@ Package: elevatr Title: Access Elevation Data from Various APIs Version: 1.0.0.9999 -Authors@R: c(person("Jeffrey", "Hollister", email = "hollister.jeff@epa.gov", - role = c("aut", "cre"), comment = c(ORCID = "0000-0002-9254-9740")), - person("Tarak", "Shah", role = "ctb"), - person("Jakub", "Nowosad", role = "ctb", comment = c(ORCID = "0000-0002-1057-3721")), - person("Alec L.", "Robitaille", role = "ctb", comment = c(ORCID = "0000-0002-4706-1762")), - person("Marcus W.", "Beck", role = "rev", comment = c(ORCID = "0000-0002-4996-0059")), - person("Mike", "Johnson", role = "ctb", comment = c(ORCID = "0000-0002-5288-8350"))) +Authors@R: c( + person("Jeffrey", "Hollister", , "hollister.jeff@epa.gov", role = c("aut", "cre"), + comment = c(ORCID = "0000-0002-9254-9740")), + person("Tarak", "Shah", role = "ctb"), + person("Jakub", "Nowosad", role = "ctb", + comment = c(ORCID = "0000-0002-1057-3721")), + person("Alec L.", "Robitaille", role = "ctb", + comment = c(ORCID = "0000-0002-4706-1762")), + person("Marcus W.", "Beck", role = "rev", + comment = c(ORCID = "0000-0002-4996-0059")), + 
person("Mike", "Johnson", role = "ctb", + comment = c(ORCID = "0000-0002-5288-8350")), + person("Eli", "Pousson", role = "ctb", + comment = c(ORCID = "0000-0001-8280-1706")) + ) +Maintainer: Jeffrey Hollister +Description: Several web services are available that provide access to + elevation data. This package provides access to many of those services + and returns elevation data either as an 'sf' simple features object + from point elevation services or as a 'raster' object from raster + elevation services. In future versions, 'elevatr' will drop support + for 'raster' and will instead return 'terra' objects. Currently, the + package supports access to the Amazon Web Services Terrain Tiles + , the Open Topography + Global Datasets API , and the + USGS Elevation Point Query Service + . +License: MIT + file LICENSE URL: https://github.com/jhollist/elevatr/ BugReports: https://github.com/jhollist/elevatr/issues/ -Maintainer: Jeffrey Hollister -Description: Several web services are available that provide access to elevation - data. This package provides access to many of those services and - returns elevation data either as an 'sf' simple features object - from point elevation services or as a 'raster' object from raster - elevation services. In future versions, 'elevatr' will drop - support for 'raster' and will instead return 'terra' objects. - Currently, the package supports access to the Amazon Web Services - Terrain Tiles , - the Open Topography Global Datasets - API , and the USGS - Elevation Point Query Service . -Depends: R (>= 3.5.0) +Depends: + R (>= 3.5.0) Imports: + curl, + furrr, + future, httr, jsonlite, + methods, progressr, - sf, - terra, - future, - furrr, purrr, - units, - slippymath, - curl, raster, - methods -License: MIT + file LICENSE -Encoding: UTF-8 -LazyData: true -RoxygenNote: 7.3.2 + sf, + slippymath, + terra, + units Suggests: - testthat, + formatR, knitr, + progress, rmarkdown, - formatR, - progress -VignetteBuilder: knitr + testthat +VignetteBuilder: + knitr +Encoding: UTF-8 +LazyData: true Roxygen: list(markdown = TRUE) +RoxygenNote: 7.3.2 From 4e47e92492130d7a238ceea3ac2246961c0f3fa0 Mon Sep 17 00:00:00 2001 From: your name Date: Tue, 20 Aug 2024 01:39:02 -0400 Subject: [PATCH 20/23] Update NEWS --- NEWS.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index e8014d0..d8ccc60 100644 --- a/NEWS.md +++ b/NEWS.md @@ -3,6 +3,11 @@ elevatr 0.99.0 (2024-0x-xx) # API Changes - add argument for specifying temp directory for download files. Allows users to specify a specific location. (Thanks, @andrew-caudillo: https://github.com/jhollist/elevatr/issues/95) +# Added Functionality +- Add new `get_elev_profile()` function (#93, @elipousson) + +# Fixes +- Fix issue with `SpatVector` input objects (#91, @elipousson) elevatr 0.99.0 (2023-09-11) ============= @@ -200,4 +205,4 @@ elevatr 0.1.0 (2017-01-25) ========================== ## Initial CRAN Release -- This is the initial CRAN release. Provides access to point elevation data from USGS and from Mapzen. Provides access to raster DEM from Mapzen Terrain Tiles and AWS Terrain Tiles. \ No newline at end of file +- This is the initial CRAN release. Provides access to point elevation data from USGS and from Mapzen. Provides access to raster DEM from Mapzen Terrain Tiles and AWS Terrain Tiles. 
From a7fb811d6160a5bfd98dfcebb0a13efa22f8e6e6 Mon Sep 17 00:00:00 2001 From: Eli Pousson Date: Tue, 17 Sep 2024 23:52:22 -0400 Subject: [PATCH 21/23] Fix merge conflicts w/ migrated repo Also update get_elev_raster to use terra:rast if raster package is not installed --- DESCRIPTION | 4 ++-- NEWS.md | 3 ++- R/get_elev_point.R | 12 +++++++++--- R/get_elev_raster.R | 23 ++++++++++++++--------- man/get_aws_points.Rd | 4 +++- man/get_aws_terrain.Rd | 2 +- man/get_elev_point.Rd | 4 ++++ man/get_elev_raster.Rd | 1 + man/get_epqs.Rd | 2 +- vignettes/introduction_to_elevatr.Rmd | 10 +++------- 10 files changed, 40 insertions(+), 25 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index 811d46e..4748f7d 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -29,8 +29,8 @@ Description: Several web services are available that provide access to USGS Elevation Point Query Service . License: MIT + file LICENSE -URL: https://github.com/jhollist/elevatr/ -BugReports: https://github.com/jhollist/elevatr/issues/ +URL: https://github.com/usepa/elevatr/ +BugReports: https://github.com/usepa/elevatr/issues/ Depends: R (>= 3.5.0) Imports: diff --git a/NEWS.md b/NEWS.md index d8ccc60..62c0d3c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,7 +1,8 @@ -elevatr 0.99.0 (2024-0x-xx) +elevatr 1.0.0 (2024-0x-xx) ============= # API Changes - add argument for specifying temp directory for download files. Allows users to specify a specific location. (Thanks, @andrew-caudillo: https://github.com/jhollist/elevatr/issues/95) +- exposed ncpu argument so user can control. Defaults to 2 if more than 2 cores available. Thanks to @courtiol for finding this issue and the suggestion! # Added Functionality - Add new `get_elev_profile()` function (#93, @elipousson) diff --git a/R/get_elev_point.R b/R/get_elev_point.R index c7f48b0..c0a66ca 100644 --- a/R/get_elev_point.R +++ b/R/get_elev_point.R @@ -23,6 +23,8 @@ #' cases provided the points are in a similar geographic area. The #' "aws" source downloads a DEM using \code{get_elev_raster} and then #' extracts the elevation for each point. +#' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 +#' if more than two available, 1 otherwise. #' @param overwrite A logical indicating that existing \code{elevation} and #' \code{elev_units} columns should be overwritten. Default is #' FALSE and \code{get_elev_point} will error if these columns @@ -81,6 +83,7 @@ get_elev_point <- function(locations, prj = NULL, src = c("epqs", "aws"), + ncpu = ifelse(future::availableCores() > 2, 2, 1), coords = c("x", "y"), overwrite = FALSE, ..., @@ -124,6 +127,7 @@ get_elev_point <- function(locations, if (src == "epqs") { locations_prj <- get_epqs( locations, + ncpu = ncpu, elev_col = elev_col, units = units, ... @@ -135,6 +139,7 @@ get_elev_point <- function(locations, if(src == "aws") { locations_prj <- get_aws_points( locations, + ncpu = ncpu, verbose = FALSE, elev_col = elev_col, units = units, @@ -200,7 +205,7 @@ get_elev_point <- function(locations, #' @keywords internal get_epqs <- function(locations, units = c("meters", "feet"), - ncpu = future::availableCores() - 1, + ncpu = ifelse(future::availableCores() > 2, 2, 1), serial = NULL, elev_col = "elevation") { @@ -348,7 +353,7 @@ get_epqs <- function(locations, #' @param units Character string of either meters or feet. Conversions for #' 'aws' are handled in R as the AWS terrain tiles are served in #' meters. -#' @param verbose Report back messages. +#' @inheritParams get_elev_raster #' @param ... 
Arguments to be passed to \code{get_elev_raster} #' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with #' elevation added to the data slot and a character of the elevation units @@ -357,11 +362,12 @@ get_epqs <- function(locations, get_aws_points <- function(locations, z = 5, units = c("meters", "feet"), + ncpu = ifelse(future::availableCores() > 2, 2, 1), verbose = TRUE, elev_col = "elevation", ...) { units <- match.arg(units) - dem <- get_elev_raster(locations, z, verbose = verbose, ...) + dem <- get_elev_raster(locations, z, ncpu = ncpu, verbose = verbose, ...) dem <- methods::as(dem, "SpatRaster") elevation <- units::set_units(terra::extract(dem, locations)[,2], "m") if (units == "feet"){ diff --git a/R/get_elev_raster.R b/R/get_elev_raster.R index 678a04f..09efa27 100644 --- a/R/get_elev_raster.R +++ b/R/get_elev_raster.R @@ -86,7 +86,8 @@ get_elev_raster <- function(locations, z, prj = NULL, src = c("aws", "gl3", "gl1", "alos", "srtm15plus"), expand = NULL, clip = c("tile", "bbox", "locations"), verbose = TRUE, neg_to_na = FALSE, - override_size_check = FALSE,tmp_dir = tempdir(), + override_size_check = FALSE, tmp_dir = tempdir(), + ncpu = ifelse(future::availableCores() > 2, 2, 1), coords = c("x", "y"), ...){ # First Check for internet @@ -129,7 +130,7 @@ get_elev_raster <- function(locations, z, prj = NULL, # Pass of locations to APIs to get data as raster if(src == "aws") { raster_elev <- get_aws_terrain(locations, z, prj = prj, expand = expand, - tmp_dir = tmp_dir, ...) + tmp_dir = tmp_dir, ncpu = ncpu, ...) } else if(src %in% c("gl3", "gl1", "alos", "srtm15plus")){ raster_elev <- get_opentopo(locations, src, prj = prj, expand = expand, tmp_dir = tmp_dir, ...) @@ -153,10 +154,14 @@ get_elev_raster <- function(locations, z, prj = NULL, } attr(raster_elev, "sources") <- sources - #Returning raster for now - #Switch to SpatRaster in near future. - raster::raster(raster_elev) + if (requireNamespace("raster", quietly = TRUE)) { + # Return raster if raster package is installed + raster::raster(raster_elev) + } else { + # Otherwise return SpatRaster + terra::rast(raster_elev) + } } #' Get a digital elevation model from the AWS Terrain Tiles @@ -198,7 +203,7 @@ get_elev_raster <- function(locations, z, prj = NULL, #' @keywords internal get_aws_terrain <- function(locations, z, prj, expand=NULL, - ncpu = future::availableCores() - 1, + ncpu = ifelse(future::availableCores() > 2, 2, 1), serial = NULL, tmp_dir = tempdir(), ...){ # Expand (if needed) and re-project bbx to dd @@ -247,7 +252,7 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") resp <- httr::GET(x, - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. = FALSE) @@ -265,7 +270,7 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, p() tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") resp <- httr::GET(x, - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. 
= FALSE) @@ -398,7 +403,7 @@ get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), message("Downloading OpenTopography DEMs") resp <- httr::GET(url,httr::write_disk(tmpfile,overwrite=TRUE), - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::progress(), ...) message("") if (httr::http_type(resp) != "application/octet-stream") { diff --git a/man/get_aws_points.Rd b/man/get_aws_points.Rd index 3778347..42ed745 100644 --- a/man/get_aws_points.Rd +++ b/man/get_aws_points.Rd @@ -8,6 +8,7 @@ get_aws_points( locations, z = 5, units = c("meters", "feet"), + ncpu = ifelse(future::availableCores() > 2, 2, 1), verbose = TRUE, elev_col = "elevation", ... @@ -31,7 +32,8 @@ default value is 5 is supplied.} 'aws' are handled in R as the AWS terrain tiles are served in meters.} -\item{verbose}{Report back messages.} +\item{verbose}{Toggles on and off the note about units and coordinate +reference system.} \item{...}{Arguments to be passed to \code{get_elev_raster}} } diff --git a/man/get_aws_terrain.Rd b/man/get_aws_terrain.Rd index 9fcfe0b..7690a1c 100644 --- a/man/get_aws_terrain.Rd +++ b/man/get_aws_terrain.Rd @@ -15,7 +15,7 @@ get_aws_terrain( z, prj, expand = NULL, - ncpu = future::availableCores() - 1, + ncpu = ifelse(future::availableCores() > 2, 2, 1), serial = NULL, tmp_dir = tempdir(), ... diff --git a/man/get_elev_point.Rd b/man/get_elev_point.Rd index 300e91e..38441cd 100644 --- a/man/get_elev_point.Rd +++ b/man/get_elev_point.Rd @@ -8,6 +8,7 @@ get_elev_point( locations, prj = NULL, src = c("epqs", "aws"), + ncpu = ifelse(future::availableCores() > 2, 2, 1), coords = c("x", "y"), overwrite = FALSE, ..., @@ -35,6 +36,9 @@ cases provided the points are in a similar geographic area. The "aws" source downloads a DEM using \code{get_elev_raster} and then extracts the elevation for each point.} +\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +if more than two available, 1 otherwise.} + \item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} \item{overwrite}{A logical indicating that existing \code{elevation} and diff --git a/man/get_elev_raster.Rd b/man/get_elev_raster.Rd index 29e2570..112ed4e 100644 --- a/man/get_elev_raster.Rd +++ b/man/get_elev_raster.Rd @@ -15,6 +15,7 @@ get_elev_raster( neg_to_na = FALSE, override_size_check = FALSE, tmp_dir = tempdir(), + ncpu = ifelse(future::availableCores() > 2, 2, 1), coords = c("x", "y"), ... ) diff --git a/man/get_epqs.Rd b/man/get_epqs.Rd index c1d8573..7b52e34 100644 --- a/man/get_epqs.Rd +++ b/man/get_epqs.Rd @@ -7,7 +7,7 @@ get_epqs( locations, units = c("meters", "feet"), - ncpu = future::availableCores() - 1, + ncpu = ifelse(future::availableCores() > 2, 2, 1), serial = NULL, elev_col = "elevation" ) diff --git a/vignettes/introduction_to_elevatr.Rmd b/vignettes/introduction_to_elevatr.Rmd index 55e310f..c2b94d4 100644 --- a/vignettes/introduction_to_elevatr.Rmd +++ b/vignettes/introduction_to_elevatr.Rmd @@ -1,12 +1,8 @@ --- -title: "Accessing elevation data in R with the elevatr package, version 1" +title: "Introduction to elevatr" author: "Jeffrey W. 
Hollister" date: '`r Sys.Date()`' -output: - html_document: - theme: readable - toc: yes - toc_float: yes +output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Introduction to elevatr} %\VignetteEncoding{UTF-8} @@ -213,4 +209,4 @@ Below is an example for grabbing the OpenTopography SRTM data. lake_srtmgl1 <- get_elev_raster(lake, src = "gl1", clip = "bbox", expand = 1000) plot(lake_srtmgl1) plot(st_geometry(lake), add = TRUE, col = "blue") -``` \ No newline at end of file +``` From 844467f471f7e8e0672bd6e9b9bfa13e29cccb34 Mon Sep 17 00:00:00 2001 From: Eli Pousson Date: Tue, 17 Sep 2024 23:54:08 -0400 Subject: [PATCH 22/23] Update check-standard.yml to fix merge conflict --- .github/workflows/check-standard.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/check-standard.yaml b/.github/workflows/check-standard.yaml index 6504e32..73da5dc 100644 --- a/.github/workflows/check-standard.yaml +++ b/.github/workflows/check-standard.yaml @@ -18,9 +18,9 @@ jobs: - {os: windows-latest, r: 'release'} - {os: windows-latest, r: 'oldrel'} - {os: macos-latest, r: 'release'} - - {os: ubuntu-20.04, r: 'release', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} + #- {os: ubuntu-20.04, r: 'release', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} - {os: ubuntu-latest, r: 'devel', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} - - {os: ubuntu-20.04, r: 'oldrel', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} + #- {os: ubuntu-20.04, r: 'oldrel', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} env: R_REMOTES_NO_ERRORS_FROM_WARNINGS: true @@ -58,32 +58,32 @@ jobs: do eval sudo $cmd done < <(Rscript -e 'writeLines(remotes::system_requirements("ubuntu", "20.04"))') - + - name: "Install spatial libraries on linux" if: runner.os == 'Linux' run: sudo apt-get install libgdal-dev libproj-dev libgeos-dev libudunits2-dev - + - name: "Install spatial libraries on macOS" if: runner.os == 'macOS' run: | # conflicts with gfortran from r-lib/actions when linking gcc # rm '/usr/local/bin/gfortran' brew install pkg-config gdal proj geos sqlite3 - + - name: Install dependencies on windows and mac if: runner.os != 'Linux' run: | remotes::install_deps(dependencies = TRUE, type = "binary") remotes::install_cran("rcmdcheck") shell: Rscript {0} - + - name: Install dependencies linux if: runner.os == 'Linux' run: | remotes::install_deps(dependencies = TRUE) remotes::install_cran("rcmdcheck") shell: Rscript {0} - + - name: Set API key env: OPENTOPO_KEY: ${{ secrets.OPENTOPO }} @@ -102,4 +102,4 @@ jobs: uses: actions/upload-artifact@main with: name: ${{ runner.os }}-r${{ matrix.config.r }}-results - path: check \ No newline at end of file + path: check From 834e2bc0fe5da6f111cef617386dd8df0a388805 Mon Sep 17 00:00:00 2001 From: Eli Pousson Date: Wed, 18 Sep 2024 00:01:02 -0400 Subject: [PATCH 23/23] Fill in missing argument definitions --- R/get_elev_raster.R | 4 ++++ man/get_aws_points.Rd | 3 +++ man/get_elev_raster.Rd | 6 +++++- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/R/get_elev_raster.R b/R/get_elev_raster.R index 09efa27..e7bd45a 100644 --- a/R/get_elev_raster.R +++ b/R/get_elev_raster.R @@ -46,6 +46,10 @@ #' temporary location. Alternatively, the user may supply an #' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. 
+#' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 +#' if more than two available, 1 otherwise. +#' @param coords Coordinate column names passed to [sf::st_as_sf()]. Defaults to +#' `c("x", "y")`. #' @param ... Extra arguments to pass to \code{httr::GET} via a named vector, #' \code{config}. See #' \code{\link{get_aws_terrain}} for more details. diff --git a/man/get_aws_points.Rd b/man/get_aws_points.Rd index 42ed745..a14361b 100644 --- a/man/get_aws_points.Rd +++ b/man/get_aws_points.Rd @@ -32,6 +32,9 @@ default value is 5 is supplied.} 'aws' are handled in R as the AWS terrain tiles are served in meters.} +\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +if more than two available, 1 otherwise.} + \item{verbose}{Toggles on and off the note about units and coordinate reference system.} diff --git a/man/get_elev_raster.Rd b/man/get_elev_raster.Rd index 112ed4e..bd0ac6b 100644 --- a/man/get_elev_raster.Rd +++ b/man/get_elev_raster.Rd @@ -73,7 +73,11 @@ temporary location. Alternatively, the user may supply an existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} +\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +if more than two available, 1 otherwise.} + +\item{coords}{Coordinate column names passed to \code{\link[sf:st_as_sf]{sf::st_as_sf()}}. Defaults to +\code{c("x", "y")}.} \item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, \code{config}. See