diff --git a/.github/workflows/check-standard.yaml b/.github/workflows/check-standard.yaml index 4b1ba8c..73da5dc 100644 --- a/.github/workflows/check-standard.yaml +++ b/.github/workflows/check-standard.yaml @@ -58,32 +58,32 @@ jobs: do eval sudo $cmd done < <(Rscript -e 'writeLines(remotes::system_requirements("ubuntu", "20.04"))') - + - name: "Install spatial libraries on linux" if: runner.os == 'Linux' run: sudo apt-get install libgdal-dev libproj-dev libgeos-dev libudunits2-dev - + - name: "Install spatial libraries on macOS" if: runner.os == 'macOS' run: | # conflicts with gfortran from r-lib/actions when linking gcc # rm '/usr/local/bin/gfortran' brew install pkg-config gdal proj geos sqlite3 - + - name: Install dependencies on windows and mac if: runner.os != 'Linux' run: | remotes::install_deps(dependencies = TRUE, type = "binary") remotes::install_cran("rcmdcheck") shell: Rscript {0} - + - name: Install dependencies linux if: runner.os == 'Linux' run: | remotes::install_deps(dependencies = TRUE) remotes::install_cran("rcmdcheck") shell: Rscript {0} - + - name: Set API key env: OPENTOPO_KEY: ${{ secrets.OPENTOPO }} @@ -102,4 +102,4 @@ jobs: uses: actions/upload-artifact@main with: name: ${{ runner.os }}-r${{ matrix.config.r }}-results - path: check \ No newline at end of file + path: check diff --git a/DESCRIPTION b/DESCRIPTION index 701382f..4748f7d 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,50 +1,61 @@ Package: elevatr Title: Access Elevation Data from Various APIs Version: 1.0.0.9999 -Authors@R: c(person("Jeffrey", "Hollister", email = "hollister.jeff@epa.gov", - role = c("aut", "cre"), comment = c(ORCID = "0000-0002-9254-9740")), - person("Tarak", "Shah", role = "ctb"), - person("Jakub", "Nowosad", role = "ctb", comment = c(ORCID = "0000-0002-1057-3721")), - person("Alec L.", "Robitaille", role = "ctb", comment = c(ORCID = "0000-0002-4706-1762")), - person("Marcus W.", "Beck", role = "rev", comment = c(ORCID = "0000-0002-4996-0059")), - person("Mike", "Johnson", role = "ctb", comment = c(ORCID = "0000-0002-5288-8350"))) +Authors@R: c( + person("Jeffrey", "Hollister", , "hollister.jeff@epa.gov", role = c("aut", "cre"), + comment = c(ORCID = "0000-0002-9254-9740")), + person("Tarak", "Shah", role = "ctb"), + person("Jakub", "Nowosad", role = "ctb", + comment = c(ORCID = "0000-0002-1057-3721")), + person("Alec L.", "Robitaille", role = "ctb", + comment = c(ORCID = "0000-0002-4706-1762")), + person("Marcus W.", "Beck", role = "rev", + comment = c(ORCID = "0000-0002-4996-0059")), + person("Mike", "Johnson", role = "ctb", + comment = c(ORCID = "0000-0002-5288-8350")), + person("Eli", "Pousson", role = "ctb", + comment = c(ORCID = "0000-0001-8280-1706")) + ) +Maintainer: Jeffrey Hollister +Description: Several web services are available that provide access to + elevation data. This package provides access to many of those services + and returns elevation data either as an 'sf' simple features object + from point elevation services or as a 'raster' object from raster + elevation services. In future versions, 'elevatr' will drop support + for 'raster' and will instead return 'terra' objects. Currently, the + package supports access to the Amazon Web Services Terrain Tiles + , the Open Topography + Global Datasets API , and the + USGS Elevation Point Query Service + . 
+License: MIT + file LICENSE URL: https://github.com/usepa/elevatr/ BugReports: https://github.com/usepa/elevatr/issues/ -Maintainer: Jeffrey Hollister -Description: Several web services are available that provide access to elevation - data. This package provides access to many of those services and - returns elevation data either as an 'sf' simple features object - from point elevation services or as a 'raster' object from raster - elevation services. In future versions, 'elevatr' will drop - support for 'raster' and will instead return 'terra' objects. - Currently, the package supports access to the Amazon Web Services - Terrain Tiles , - the Open Topography Global Datasets - API , and the USGS - Elevation Point Query Service . -Depends: R (>= 3.5.0) +Depends: + R (>= 3.5.0) Imports: + curl, + furrr, + future, httr, jsonlite, + methods, progressr, - sf, - terra, - future, - furrr, purrr, - units, - slippymath, - curl, raster, - methods -License: MIT + file LICENSE -Encoding: UTF-8 -LazyData: true + sf, + slippymath, + terra, + units Suggests: - testthat, + formatR, knitr, + progress, rmarkdown, - formatR, - progress -VignetteBuilder: knitr + testthat +VignetteBuilder: + knitr +Encoding: UTF-8 +LazyData: true +Roxygen: list(markdown = TRUE) RoxygenNote: 7.3.2 diff --git a/NAMESPACE b/NAMESPACE index d6b8469..eb0314c 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -3,6 +3,7 @@ export(get_aws_points) export(get_aws_terrain) export(get_elev_point) +export(get_elev_profile) export(get_elev_raster) export(get_epqs) export(get_opentopo) diff --git a/NEWS.md b/NEWS.md index 52f5c23..62c0d3c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -4,6 +4,12 @@ elevatr 1.0.0 (2024-0x-xx) - add argument for specifying temp directory for download files. Allows users to specify a specific location. (Thanks, @andrew-caudillo: https://github.com/jhollist/elevatr/issues/95) - exposed ncpu argument so user can control. Defaults to 2 if more than 2 cores available. Thanks to @courtiol for finding this issue and the suggestion! +# Added Functionality +- Add new `get_elev_profile()` function (#93, @elipousson) + +# Fixes +- Fix issue with `SpatVector` input objects (#91, @elipousson) + elevatr 0.99.0 (2023-09-11) ============= @@ -200,4 +206,4 @@ elevatr 0.1.0 (2017-01-25) ========================== ## Initial CRAN Release -- This is the initial CRAN release. Provides access to point elevation data from USGS and from Mapzen. Provides access to raster DEM from Mapzen Terrain Tiles and AWS Terrain Tiles. \ No newline at end of file +- This is the initial CRAN release. Provides access to point elevation data from USGS and from Mapzen. Provides access to raster DEM from Mapzen Terrain Tiles and AWS Terrain Tiles. diff --git a/R/get_elev_point.R b/R/get_elev_point.R index f2fb56b..6bb9356 100644 --- a/R/get_elev_point.R +++ b/R/get_elev_point.R @@ -1,26 +1,26 @@ #' Get Point Elevation -#' -#' This function provides access to point elevations using either the USGS -#' Elevation Point Query Service (US Only) or by extracting point elevations -#' from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x -#' (long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as -#' input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with -#' elevation and elevation units as an added \code{data.frame}. -#' -#' -#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the -#' first column and y (e.g. 
latitude) as the second column, a -#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. -#' Elevation for these points will be returned in the +#' +#' This function provides access to point elevations using either the USGS +#' Elevation Point Query Service (US Only) or by extracting point elevations +#' from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x +#' (long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as +#' input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with +#' elevation and elevation units as an added \code{data.frame}. +#' +#' +#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the +#' first column and y (e.g. latitude) as the second column, a +#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. +#' Elevation for these points will be returned in the #' originally supplied class. -#' @param prj A valid input to \code{\link{st_crs}}. This +#' @param prj A valid input to \code{\link{st_crs}}. This #' argument is required for a \code{data.frame} of locations and optional #' for \code{sf} locations. -#' @param src A character indicating which API to use, either "epqs" or "aws" -#' accepted. The "epqs" source is relatively slow for larger numbers -#' of points (e.g. > 500). The "aws" source may be quicker in these -#' cases provided the points are in a similar geographic area. The +#' @param src A character indicating which API to use, either "epqs" or "aws" +#' accepted. The "epqs" source is relatively slow for larger numbers +#' of points (e.g. > 500). The "aws" source may be quicker in these +#' cases provided the points are in a similar geographic area. The #' "aws" source downloads a DEM using \code{get_elev_raster} and then #' extracts the elevation for each point. #' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 @@ -29,14 +29,19 @@ #' \code{elev_units} columns should be overwritten. Default is #' FALSE and \code{get_elev_point} will error if these columns #' already exist. -#' @param ... Additional arguments passed to get_epqs or get_aws_points. When -#' using "aws" as the source, pay attention to the `z` argument. A -#' defualt of 5 is used, but this uses a raster with a large ~4-5 km -#' pixel. Additionally, the source data changes as zoom levels -#' increase. -#' Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} -#' for details. -#' @return Function returns an \code{sf} object in the projection specified by +#' @param ... Additional arguments passed to get_epqs or get_aws_points. When +#' using "aws" as the source, pay attention to the `z` argument. A +#' default of 5 is used, but this uses a raster with a large ~4-5 km +#' pixel. Additionally, the source data changes as zoom levels +#' increase. +#' Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +#' for details. +#' @param units Default: `c("meters", "feet")`. Set to "meters" by default. Any +#' character string that is a valid distance unit supported by +#' `units::set_units()`. +#' @param elev_units_col Elevation units column. +#' @inheritParams loc_check +#' @return Function returns an \code{sf} object in the projection specified by #' the \code{prj} argument. 
#' @export #' @examples @@ -44,9 +49,9 @@ #' library(elevatr) #' library(sf) #' library(terra) -#' -#' mts <- data.frame(x = c(-71.3036, -72.8145), -#' y = c(44.2700, 44.5438), +#' +#' mts <- data.frame(x = c(-71.3036, -72.8145), +#' y = c(44.2700, 44.5438), #' names = c("Mt. Washington", "Mt. Mansfield")) #' ll_prj <- 4326 #' mts_sf <- st_as_sf(x = mts, coords = c("x", "y"), crs = ll_prj) @@ -54,127 +59,161 @@ #' mts_raster <- rast(mts_sf, nrow = 5, ncol = 5) #' # Raster with cells for each location #' mts_raster_loc <- terra::rasterize(mts_sf, rast(mts_sf, nrow = 10, ncol = 10)) -#' +#' #' get_elev_point(locations = mts, prj = ll_prj) #' get_elev_point(locations = mts, units="feet", prj = ll_prj) #' get_elev_point(locations = mts_sf) #' get_elev_point(locations = mts_raster) #' get_elev_point(locations = mts_raster_loc) -#' -#' +#' +#' #' # Code to split into a loop and grab point at a time. -#' # This is usually faster for points that are spread apart -#' +#' # This is usually faster for points that are spread apart +#' #' library(dplyr) -#' +#' #' elev <- vector("numeric", length = nrow(mts)) #' for(i in seq_along(mts)){ -#' elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", +#' elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", #' z = 10)$elevation} #' mts_elev <- cbind(mts, elev) #' mts_elev #' } #' -get_elev_point <- function(locations, prj = NULL, src = c("epqs", "aws"), +get_elev_point <- function(locations, + prj = NULL, + src = c("epqs", "aws"), ncpu = ifelse(future::availableCores() > 2, 2, 1), - overwrite = FALSE, ...){ - + coords = c("x", "y"), + overwrite = FALSE, + ..., + units = c("meters", "feet"), + elev_col = "elevation", + elev_units_col = "elev_units") { # First Check for internet if(!curl::has_internet()) { message("Please connect to the internet and try again.") return(NULL) } - + src <- match.arg(src) - + # Check for existing elevation/elev_units columns and overwrite or error - if(!overwrite & any(names(locations) %in% c("elevation", "elev_units"))){ - stop(paste0("The elevation and elev_units columns already exist.\n", + if (!overwrite && any(names(locations) %in% c(elev_col, elev_units_col))) { + stop(paste0("The elevation and elev_units columns already exist.\n", " To replace these columns set the overwrite argument to TRUE.")) } - - locations <- loc_check(locations,prj) - - if(is.null(prj)){ + + locations <- loc_check( + locations, + prj = prj, + coords = coords, + elev_col = elev_col + ) + + if (is.null(prj)) { prj <- sf::st_crs(locations) } - + + if (all(units %in% c("meters", "feet"))) { + units <- match.arg(units) + convert_units <- units + } else { + convert_units <- units + units <- "meters" + } + # Pass of reprojected to epqs or aws to get data as spatialpointsdataframe - if (src == "epqs"){ - locations_prj <- get_epqs(locations, ncpu = ncpu, ...) - units <- locations_prj[[2]] + if (src == "epqs") { + locations_prj <- get_epqs( + locations, + ncpu = ncpu, + elev_col = elev_col, + units = units, + ... + ) + locations_prj <- locations_prj[[1]] - } - - if(src == "aws"){ - locations_prj <- get_aws_points(locations, ncpu = ncpu, verbose = FALSE, ...) - units <- locations_prj[[2]] + } + + if(src == "aws") { + locations_prj <- get_aws_points( + locations, + ncpu = ncpu, + verbose = FALSE, + elev_col = elev_col, + units = units, + ... 
+ ) + locations_prj <- locations_prj[[1]] } # Re-project back to original, add in units, and return - locations <- sf::st_transform(sf::st_as_sf(locations_prj), - sf::st_crs(locations)) - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - - unit_column_name <- "elev_units" - - if(any(names(list(...)) %in% "units")){ - if(list(...)$units == "feet"){ - locations[[unit_column_name]] <- rep("feet", nfeature) - } else { - locations[[unit_column_name]] <- rep("meters", nfeature) - } - } else { - locations[[unit_column_name]] <- rep("meters", nfeature) - } - - if(src == "aws") { - message(paste("Note: Elevation units are in", units)) - } else { - message(paste("Note: Elevation units are in", - tolower(strsplit(units, "=")[[1]][2]))) + locations <- sf::st_transform( + sf::st_as_sf(locations_prj), + crs = prj + ) + + nfeature <- loc_length(locations) + + if (convert_units != units) { + print("here") + mode <- "standard" + elev_units_label <- convert_units + + # TODO: Implement handling for non-character units values + # if (!is.character(convert_units)) { + # elev_units_label <- unique(as.character(base::units(convert_units)[["numerator"]])) + # mode <- units::units_options("set_units_mode") + # } + stopifnot(is.character(convert_units) && (length(convert_units) == 1)) + + elevation <- units::set_units(locations[[elev_col]], value = units, mode = mode) + + locations[[elev_col]] <- as.numeric( + units::set_units(elevation, value = convert_units, mode = mode) + ) + + units <- convert_units } - locations + + locations[[elev_units_col]] <- rep(units, nfeature) + + message(paste("Note: Elevation units are in", units)) + + relocate_sf_col_end(locations) } #' Get point elevation data from the USGS Elevation Point Query Service -#' +#' #' Function for accessing elevation data from the USGS epqs -#' -#' @param locations A SpatialPointsDataFrame of the location(s) for which you -#' wish to return elevation. The first column is Longitude and -#' the second column is Latitude. -#' @param units Character string of either meters or feet. Conversions for +#' +#' @param locations A SpatialPointsDataFrame of the location(s) for which you +#' wish to return elevation. The first column is Longitude and +#' the second column is Latitude. +#' @param units Character string of either meters or feet. Conversions for #' 'epqs' are handled by the API itself. #' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 #' if more than two available, 1 otherwise. #' @param ncpu Number of CPU's to use when downloading epqs data. -#' @param serial Logical to determine if API should be hit in serial or in +#' @param serial Logical to determine if API should be hit in serial or in #' parallel. TRUE will use purrr, FALSE will use furrr. 
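The new `units` handling in `get_elev_point()` keeps the service response in meters and only converts afterwards with `units::set_units()`. A minimal sketch of that conversion (not package code), using illustrative elevation values and an assumed target unit of kilometers:

```r
library(units)

# Elevations come back from the APIs in meters; convert to any distance unit
# that units::set_units() understands ("km" is just an example target).
elev_m  <- set_units(c(1916.6, 1339.0), value = "meters", mode = "standard")
elev_km <- set_units(elev_m, value = "km", mode = "standard")
as.numeric(elev_km)
#> [1] 1.9166 1.3390
```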
-#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with #' elevation added to the data slot and a character of the elevation units #' @export #' @importFrom progressr handlers progressor with_progress #' @importFrom purrr map_dbl #' @keywords internal -get_epqs <- function(locations, units = c("meters","feet"), +get_epqs <- function(locations, + units = c("meters", "feet"), ncpu = ifelse(future::availableCores() > 2, 2, 1), - serial = NULL){ - + serial = NULL, + elev_col = "elevation") { + ll_prj <- "EPSG:4326" - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - + nfeature <- loc_length(locations) + if(is.null(serial)){ if(nfeature < 25){ serial <- TRUE @@ -182,90 +221,91 @@ get_epqs <- function(locations, units = c("meters","feet"), serial <- FALSE } } - + base_url <- "https://epqs.nationalmap.gov/v1/json?" - - if(match.arg(units) == "meters"){ - units <- "Meters" - } else if(match.arg(units) == "feet"){ - units <- "Feet" - } - + + units <- match.arg(units) + + units <- switch(units, + meters = "Meters", + feet = "Feet" + ) + locations <- sf::st_transform(locations, sf::st_crs(ll_prj)) units <- paste0("&units=",units) - + get_epqs_resp <- function(coords, base_url, units, progress = FALSE) { - + Sys.sleep(0.001) #Getting non-repeateable errors maybe too many hits... x <- coords[1] y <- coords[2] - + loc <- paste0("x=",x, "&y=", y) url <- paste0(base_url,loc,units,"&output=json") - + resp <- tryCatch(httr::GET(url), error = function(e) e) n<-1 - + while(n <= 5 & any(class(resp) == "simpleError")) { # Hit it again to test as most times this is a unexplained timeout that # Corrects on next hit - + resp <- tryCatch(httr::GET(url), error = function(e) e) n <- n + 1 } - + if(n > 5 & any(class(resp) == "simpleError")) { - message(paste0("API returned:'", resp$message, - "'. NA returned for elevation"), + message(paste0("API returned:'", resp$message, + "'. NA returned for elevation"), call. = FALSE) return(NA) } - - if(httr::status_code(resp) == 200 & - httr::content(resp, "text", encoding = "UTF-8") == + + if(httr::status_code(resp) == 200 & + httr::content(resp, "text", encoding = "UTF-8") == "Invalid or missing input parameters."){ message("API returned an empty repsonse (e.g. location in ocean or not in U.S.). NA returned for elevation") return(NA) } else if(httr::status_code(resp) == 200){ - - resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), + + resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), simplifyVector = FALSE), error = function(e) e) n<-1 while(n <= 5 & any(class(resp) == "simpleError")) { # Hit it again. Getting hard to repeat API errors that usually self correct... - - resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), + + resp <- tryCatch(jsonlite::fromJSON(httr::content(resp, "text", encoding = "UTF-8"), simplifyVector = FALSE), error = function(e) e) n <- n + 1 } - + if(n >= 5 & any(class(resp) == "simpleError")) { message("API error, NA returned for elevation") return(NA) } - + } else { - message(paste0("API returned a status code:'", resp$status_code, - "'. NA returned for elevation"), + message(paste0("API returned a status code:'", resp$status_code, + "'. NA returned for elevation"), call. 
= FALSE) return(NA) } round(as.numeric(resp$value), 2) } - - coords_df <- split(data.frame(sf::st_coordinates(locations)), - seq_along(locations$elevation)) - + + coords_df <- split(data.frame(sf::st_coordinates(locations)), + seq_along(locations[["elevation"]])) + #elev_column_name <- make.unique(c(names(locations), "elevation")) #elev_column_name <- elev_column_name[!elev_column_name %in% names(locations)] - elev_column_name <- "elevation" - + elev_column_name <- elev_col + message("Downloading point elevations:") - + progressr::handlers( progressr::handler_progress( format = " Accessing point elevations [:bar] :percent", - clear = FALSE, + clear = FALSE, width= 60 )) #browser() @@ -278,102 +318,67 @@ get_epqs <- function(locations, units = c("meters","feet"), get_epqs_resp(x, base_url, units) }) } else { - + future::plan(future::multisession, workers = ncpu) p <- progressor(along = coords_df) - locations[[elev_column_name]] <-furrr::future_map_dbl(coords_df, + locations[[elev_column_name]] <- furrr::future_map_dbl(coords_df, function(x) { p() - get_epqs_resp(x, base_url, + get_epqs_resp(x, base_url, units)}) - + } }) - + # For areas without epqs values that return -1000000, switch to NA - locations$elevation[locations[[elev_column_name]] == -1000000] <- NA + locations[[elev_col]][locations[[elev_column_name]] == -1000000] <- NA location_list <- list(locations, units) if(serial==FALSE){future::plan(future::sequential)} location_list } #' Get point elevation data from the AWS Terrain Tiles -#' -#' Function for accessing elevation data from AWS and extracting the elevations -#' -#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the -#' first column and y (e.g. latitude) as the second column, a -#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. -#' Elevation for these points will be returned in the +#' +#' Function for accessing elevation data from AWS and extracting the elevations +#' +#' @param locations Either a \code{data.frame} with x (e.g. longitude) as the +#' first column and y (e.g. latitude) as the second column, a +#' \code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +#' \code{sf} \code{POINT} or \code{MULTIPOINT} object. +#' Elevation for these points will be returned in the #' originally supplied class. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at -#' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. -#' default value is 5 is supplied. -#' @param units Character string of either meters or feet. Conversions for -#' 'aws' are handled in R as the AWS terrain tiles are served in +#' of the resultant raster is determined by the zoom and latitude. For +#' details on zoom and resolution see the documentation from Mapzen at +#' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. +#' default value is 5 is supplied. +#' @param units Character string of either meters or feet. Conversions for +#' 'aws' are handled in R as the AWS terrain tiles are served in #' meters. -#' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 -#' if more than two available, 1 otherwise. -#' @param verbose Report back messages. +#' @inheritParams get_elev_raster #' @param ... 
Arguments to be passed to \code{get_elev_raster} -#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +#' @return a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with #' elevation added to the data slot and a character of the elevation units #' @export #' @keywords internal -get_aws_points <- function(locations, z = 5, units = c("meters", "feet"), +get_aws_points <- function(locations, + z = 5, + units = c("meters", "feet"), ncpu = ifelse(future::availableCores() > 2, 2, 1), - verbose = TRUE, ...){ + verbose = TRUE, + elev_col = "elevation", + ...) { units <- match.arg(units) dem <- get_elev_raster(locations, z, ncpu = ncpu, verbose = verbose, ...) dem <- methods::as(dem, "SpatRaster") elevation <- units::set_units(terra::extract(dem, locations)[,2], "m") - if(units == "feet"){ + if (units == "feet"){ elevation <- as.numeric(units::set_units(elevation, "ft")) } else { elevation <- as.numeric(elevation) } - locations$elevation <- round(elevation, 2) + + locations[[elev_col]] <- round(elevation, 2) location_list <- list(locations, units) location_list } - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/R/get_elev_profile.R b/R/get_elev_profile.R new file mode 100644 index 0000000..bff2bc7 --- /dev/null +++ b/R/get_elev_profile.R @@ -0,0 +1,107 @@ +#' Get Point Elevation along a Profile Line +#' +#' [get_elev_profile()] allows users to provide LINESTRING inputs to +#' [sf::st_line_sample()] or to cast LINESTRING to POINT before calling +#' [get_elev_point()] to get the point elevations. The function allows users to +#' get elevation along a profile line and, optionally, include a distance or +#' cumulative distance column in the output sf data frame. +#' +#' @inheritParams loc_linestring_to_point +#' @param include Option of columns to include: one of "default", "dist", or +#' "cumdist". Default value returns same columns as [get_elev_point()]. If +#' `include = "dist"`, the returned locations include the distance between +#' each successive pair of points. If `include = "cumdist"`, the distances are +#' provided as a cumulative sum. +#' @inheritParams get_elev_point +#' @param dist_col Column name to use for optional distance column. Only used if +#' `include` is set to `"dist"` or `"cumdist"`. 
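Internally, LINESTRING input is resampled to points before the elevation lookup (see `loc_linestring_to_point()` further down). A minimal sketch of that sampling step with `sf`, using a made-up two-vertex line rather than package data:

```r
library(sf)

# st_line_sample() requires projected coordinates, so build the line in
# lon/lat and transform first; the coordinates are illustrative only.
ln <- st_sfc(st_linestring(rbind(c(-72.81, 44.54), c(-71.30, 44.27))), crs = 4326)
ln <- st_transform(ln, 3857)

pts <- st_cast(st_line_sample(ln, n = 10, type = "regular"), to = "POINT")
pts  # 10 POINT geometries, ready to pass to get_elev_point()
```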
+#' @examples +#' \dontrun{ +#' library(sf) +#' library(elevatr) +#' +#' nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> +#' st_transform(3857) +#' +#' nc_line <- suppressWarnings( +#' st_cast( +#' st_union( +#' st_centroid(nc[1, ]), +#' st_centroid(nc[2, ]) +#' ), +#' to = "LINESTRING" +#' ) +#' ) +#' +#' elev_point <- get_elev_profile( +#' nc_line, +#' units = "ft", +#' dist = TRUE, +#' cumulative = TRUE, +#' n = 10 +#' ) +#' +#' elev_point +#' +#' } +#' @export +get_elev_profile <- function(locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL, + units = NULL, + include = c("default", "dist", "cumdist"), + ..., + prj = NULL, + overwrite = FALSE, + coords = c("x", "y"), + elev_col = "elevation", + elev_units_col = "elev_units", + dist_col = "distance") { + locations <- loc_check(locations, prj = prj, elev_col = elev_col, coords = coords) + + if (sf::st_is(locations, "LINESTRING")) { + location_coords <- loc_linestring_to_point( + locations, + n = n, + density = density, + type = type, + sample = sample + ) + } + + stopifnot( + "`locations` must use POINT or LINESTRING geometry" = sf::st_is(locations, "POINT") + ) + + if (inherits(locations, c("sfc", "sf")) && is.null(prj)) { + prj <- sf::st_crs(locations) + } + + elev_point <- get_elev_point( + locations, + prj = prj, + units = units, + elev_col = elev_col, + elev_units_col = elev_units_col + ) + + include <- match.arg(include) + + if (include != "default") { + cumulative <- include == "cumdist" + + dist_values <- st_point_distances( + elev_point, + cumulative = cumulative, + units = units, + prj = prj + ) + + locations[[dist_col]] <- dist_values + locations <- relocate_sf_col_end(locations) + } + + locations +} diff --git a/R/get_elev_raster.R b/R/get_elev_raster.R index eef4275..7be7377 100644 --- a/R/get_elev_raster.R +++ b/R/get_elev_raster.R @@ -1,120 +1,120 @@ #' Get Raster Elevation -#' -#' Several web services provide access to raster elevation. Currently, this -#' function provides access to the Amazon Web Services Terrain Tiles and the -#' Open Topography global datasets API. The function accepts a \code{data.frame} -#' of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A +#' +#' Several web services provide access to raster elevation. Currently, this +#' function provides access to the Amazon Web Services Terrain Tiles and the +#' Open Topography global datasets API. The function accepts a \code{data.frame} +#' of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A #' \code{RasterLayer} object is returned. In subsequent versions, a \code{SpatRaster} #' will be returned. -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an -#' \code{sf}, or \code{terra} object as input. +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' \code{sf}, or \code{terra} object as input. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at +#' of the resultant raster is determined by the zoom and latitude. For +#' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. -#' The z is not required for the OpenTopography data sources. 
-#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. -#' @param src A character indicating which API to use. Currently supports "aws" -#' and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global +#' The z is not required for the OpenTopography data sources. +#' @inheritParams loc_check +#' @param src A character indicating which API to use. Currently supports "aws" +#' and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global #' datasets. "aws" is the default. #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the terrain tiles. This can -#' be used for features that fall close to the edge of a tile or -#' for retrieving additional area around the feature. If the -#' feature is a single point, the area it returns will be small if +#' bounding box that is used to fetch the terrain tiles. This can +#' be used for features that fall close to the edge of a tile or +#' for retrieving additional area around the feature. If the +#' feature is a single point, the area it returns will be small if #' clip is set to "bbox". Default is NULL. -#' @param clip A character value used to determine clipping of returned DEM. -#' The default value is "tile" which returns the full tiles. Other -#' options are "bbox" which returns the DEM clipped to the bounding -#' box of the original locations (or expanded bounding box if used), -#' or "locations" if the spatial data (e.g. polygons) in the input -#' locations should be used to clip the DEM. Locations are not used +#' @param clip A character value used to determine clipping of returned DEM. +#' The default value is "tile" which returns the full tiles. Other +#' options are "bbox" which returns the DEM clipped to the bounding +#' box of the original locations (or expanded bounding box if used), +#' or "locations" if the spatial data (e.g. polygons) in the input +#' locations should be used to clip the DEM. Locations are not used #' to clip input point datasets. Instead the bounding box is used. -#' @param verbose Toggles on and off the note about units and coordinate +#' @param verbose Toggles on and off the note about units and coordinate #' reference system. -#' @param neg_to_na Some of the data sources return large negative numbers as -#' missing data. When the end result is a projected those -#' large negative numbers can vary. When set to TRUE, only +#' @param neg_to_na Some of the data sources return large negative numbers as +#' missing data. When the end result is a projected those +#' large negative numbers can vary. When set to TRUE, only #' zero and positive values are returned. Default is FALSE. -#' @param override_size_check Boolean to override size checks. Any download +#' @param override_size_check Boolean to override size checks. Any download #' between 100 Mb and 500Mb report a message but -#' continue. Between 500Mb and 3000Mb requires +#' continue. Between 500Mb and 3000Mb requires #' interaction and greater than 3000Mb fails. These -#' can be overriden with this argument set to TRUE. -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. 
New folders are not +#' can be overriden with this argument set to TRUE. +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. -#' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 -#' if more than two available, 1 otherwise. -#' @param ... Extra arguments to pass to \code{httr::GET} via a named vector, +#' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 +#' if more than two available, 1 otherwise. +#' @param coords Coordinate column names passed to [sf::st_as_sf()]. Defaults to +#' `c("x", "y")`. +#' @param ... Extra arguments to pass to \code{httr::GET} via a named vector, #' \code{config}. See -#' \code{\link{get_aws_terrain}} for more details. -#' @return Function returns a \code{RasterLayer} in the projection -#' specified by the \code{prj} argument or in the projection of the +#' \code{\link{get_aws_terrain}} for more details. +#' @return Function returns a \code{RasterLayer} in the projection +#' specified by the \code{prj} argument or in the projection of the #' provided locations. In subsequent versions, a \code{SpatRaster} #' will be returned. -#' @details Currently, the \code{get_elev_raster} function utilizes the -#' Amazon Web Services -#' (\url{https://registry.opendata.aws/terrain-tiles/}) terrain -#' tiles and the Open Topography Global Datasets API -#' (\url{https://opentopography.org/developers}). -#' -#' The AWS Terrain Tiles data is provided via x, y, and z tiles (see -#' \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for -#' details.) The x and y are determined from the bounding box of the -#' object submitted for \code{locations} argument, and the z argument -#' must be specified by the user. +#' @details Currently, the \code{get_elev_raster} function utilizes the +#' Amazon Web Services +#' (\url{https://registry.opendata.aws/terrain-tiles/}) terrain +#' tiles and the Open Topography Global Datasets API +#' (\url{https://opentopography.org/developers}). +#' +#' The AWS Terrain Tiles data is provided via x, y, and z tiles (see +#' \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for +#' details.) The x and y are determined from the bounding box of the +#' object submitted for \code{locations} argument, and the z argument +#' must be specified by the user. 
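For reference, the tile x/y indices are derived from the bounding box with `slippymath`, mirroring the internal `get_tilexy()` helper; the coordinates below are illustrative, not package data:

```r
library(slippymath)

z <- 10
min_tile <- unlist(lonlat_to_tilenum(-72.81, 44.27, z))
max_tile <- unlist(lonlat_to_tilenum(-71.30, 44.54, z))

x_all <- seq(from = floor(min_tile[1]), to = floor(max_tile[1]))
y_all <- seq(from = floor(min_tile[2]), to = floor(max_tile[2]))
expand.grid(x_all, y_all)  # one row per terrain tile that would be requested
```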
#' @export -#' @examples +#' @examples #' \dontrun{ #' library(elevatr) #' library(sf) #' data(lake) #' lake_buff <- st_buffer(lake, 1000) -#' loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, +#' loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, #' max=sf::st_bbox(lake)$xmax), -#' y = runif(6,min=sf::st_bbox(lake)$ymin, +#' y = runif(6,min=sf::st_bbox(lake)$ymin, #' max=sf::st_bbox(lake)$ymax)) -#' +#' #' x <- get_elev_raster(locations = loc_df, prj = st_crs(lake) , z=10) #' x <- get_elev_raster(lake, z = 14) #' x <- get_elev_raster(lake, src = "gl3", expand = 5000) #' x <- get_elev_raster(lake_buff, z = 10, clip = "locations") #' } -get_elev_raster <- function(locations, z, prj = NULL, +get_elev_raster <- function(locations, z, prj = NULL, src = c("aws", "gl3", "gl1", "alos", "srtm15plus"), - expand = NULL, clip = c("tile", "bbox", "locations"), - verbose = TRUE, neg_to_na = FALSE, + expand = NULL, clip = c("tile", "bbox", "locations"), + verbose = TRUE, neg_to_na = FALSE, override_size_check = FALSE, tmp_dir = tempdir(), ncpu = ifelse(future::availableCores() > 2, 2, 1), + coords = c("x", "y"), ...){ # First Check for internet if(!curl::has_internet()) { message("Please connect to the internet and try again.") return(NULL) } - + tmp_dir <- normalizePath(tmp_dir, mustWork = TRUE) src <- match.arg(src) - clip <- match.arg(clip) - + clip <- match.arg(clip) + # Check location type and if sf, set prj. If no prj (for either) then error - locations <- loc_check(locations,prj) - - if(is.null(prj)){ + locations <- loc_check(locations, prj, coords = coords) + + if (is.null(prj)) { prj <- sf::st_crs(locations) } #need to check what is going on with PRJ when no prj passed. # Check download size and provide feedback, stop if too big! dl_size <- estimate_raster_size(locations, prj, src, z) if(dl_size > 500 & dl_size < 1000){ - message(paste0("Note: Your request will download approximately ", + message(paste0("Note: Your request will download approximately ", round(dl_size, 1), "Mb.")) } else if(dl_size > 1000 & dl_size <= 3000){ message(paste0("Your request will download approximately ", @@ -125,69 +125,70 @@ get_elev_raster <- function(locations, z, prj = NULL, } } else if(!override_size_check & dl_size > 3000){ stop(paste0("Your request will download approximately ", - round(dl_size, 1), "Mb. That's probably too big. If you + round(dl_size, 1), "Mb. That's probably too big. If you really want to do this, set override_size_check = TRUE. Note that the OpenTopography API Limit will likely be exceeded.")) } - - + + # Pass of locations to APIs to get data as raster if(src == "aws") { raster_elev <- get_aws_terrain(locations, z, prj = prj, expand = expand, tmp_dir = tmp_dir, ncpu = ncpu, ...) } else if(src %in% c("gl3", "gl1", "alos", "srtm15plus")){ - raster_elev <- get_opentopo(locations, src, prj = prj, expand = expand, + raster_elev <- get_opentopo(locations, src, prj = prj, expand = expand, tmp_dir = tmp_dir, ...) } sources <- attr(raster_elev, "sources") if(is.null(sources)){sources <- src} - + if(clip != "tile"){ message(paste("Clipping DEM to", clip)) - + raster_elev <- clip_it(raster_elev, locations, expand, clip) } - - if(verbose){ + + if (verbose){ message(paste("Note: Elevation units are in meters.")) } - - - if(neg_to_na){ + + + if (neg_to_na){ raster_elev[raster_elev < 0] <- NA } - + attr(raster_elev, "sources") <- sources - #Returning raster for now - #Switch to SpatRaster in near future. 
- raster::raster(raster_elev) - + + if (requireNamespace("raster", quietly = TRUE)) { + # Return raster if raster package is installed + raster::raster(raster_elev) + } else { + # Otherwise return SpatRaster + terra::rast(raster_elev) + } } #' Get a digital elevation model from the AWS Terrain Tiles -#' +#' #' This function uses the AWS Terrain Tile service to retrieve an elevation -#' raster from the geotiff service. It accepts a \code{sf::st_bbox} object as -#' input and returns a single raster object covering that extent. -#' -#' @source Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 -#' content courtesy of the U.S. Geological Survey and ETOPO1 content -#' courtesy of U.S. National Oceanic and Atmospheric Administration. -#' \url{https://github.com/tilezen/joerd/tree/master/docs} -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' raster from the geotiff service. It accepts a \code{sf::st_bbox} object as +#' input and returns a single raster object covering that extent. +#' +#' @source Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 +#' content courtesy of the U.S. Geological Survey and ETOPO1 content +#' courtesy of U.S. National Oceanic and Atmospheric Administration. +#' \url{https://github.com/tilezen/joerd/tree/master/docs} +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an #' \code{sp}, \code{sf}, or \code{raster} object as input. #' @param z The zoom level to return. The zoom ranges from 1 to 14. Resolution -#' of the resultant raster is determined by the zoom and latitude. For -#' details on zoom and resolution see the documentation from Mapzen at +#' of the resultant raster is determined by the zoom and latitude. For +#' details on zoom and resolution see the documentation from Mapzen at #' \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. +#' @inheritParams loc_check #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the terrain tiles. This can -#' be used for features that fall close to the edge of a tile and +#' bounding box that is used to fetch the terrain tiles. This can +#' be used for features that fall close to the edge of a tile and #' additional area around the feature is desired. Default is NULL. #' @param ncpu Number of CPU's to use when downloading aws tiles. Defaults to 2 #' if more than two available, 1 otherwise. @@ -197,11 +198,11 @@ get_elev_raster <- function(locations, z, prj = NULL, #' temporary location. Alternatively, the user may supply an #' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. -#' @param ... Extra configuration parameters to be passed to httr::GET. Common -#' usage is to adjust timeout. This is done as -#' \code{config=timeout(x)} where \code{x} is a numeric value in -#' seconds. Multiple configuration functions may be passed as a -#' vector. +#' @param ... Extra configuration parameters to be passed to httr::GET. Common +#' usage is to adjust timeout. This is done as +#' \code{config=timeout(x)} where \code{x} is a numeric value in +#' seconds. 
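The return class of `get_elev_raster()` now depends on whether the raster package is installed (see the `requireNamespace()` branch above). Converting between the two classes after the fact is straightforward; the toy raster below is for illustration only:

```r
library(terra)

r_terra <- rast(nrows = 5, ncols = 5, vals = 1:25)

if (requireNamespace("raster", quietly = TRUE)) {
  r_raster <- raster::raster(r_terra)  # SpatRaster -> RasterLayer
  r_terra2 <- rast(r_raster)           # RasterLayer -> SpatRaster
}
```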
Multiple configuration functions may be passed as a +#' vector. #' @export #' @importFrom progressr handlers progressor with_progress #' @keywords internal @@ -210,26 +211,26 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, ncpu = ifelse(future::availableCores() > 2, 2, 1), serial = NULL, tmp_dir = tempdir(), ...){ # Expand (if needed) and re-project bbx to dd - + bbx <- proj_expand(locations,prj,expand) - + base_url <- "https://s3.amazonaws.com/elevation-tiles-prod/geotiff" - - + + tiles <- get_tilexy(bbx,z) - + urls <- sprintf("%s/%s/%s/%s.tif", base_url, z, tiles[,1], tiles[,2]) - + for(i in urls){ if(httr::http_error(i)) { message("An AWS URL is invalid.") return(NULL) } } - - + + dir <- tempdir() - + nurls <- length(urls) if(is.null(serial)){ if(nurls < 175){ @@ -238,31 +239,31 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, serial <- FALSE } } - + progressr::handlers( progressr::handler_progress( format = " Accessing raster elevation [:bar] :percent", - clear = FALSE, + clear = FALSE, width= 60 )) - + progressr::with_progress({ if(serial){ - + p <- progressr::progressor(along = urls) dem_list <- purrr::map(urls, function(x){ p() - tmpfile <- tempfile(tmpdir = tmp_dir, + tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - resp <- httr::GET(x, - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + resp <- httr::GET(x, + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. = FALSE) - } + } tmpfile2 <- tmpfile - attr(tmpfile2, "source") <- + attr(tmpfile2, "source") <- httr::headers(resp)$'x-amz-meta-x-imagery-sources' tmpfile2 }) @@ -273,20 +274,20 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, function(x){ p() tmpfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - resp <- httr::GET(x, - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + resp <- httr::GET(x, + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::write_disk(tmpfile,overwrite=TRUE), ...) if (!grepl("image/tif", httr::http_type(resp))) { stop(paste("This url:", x,"did not return a tif"), call. = FALSE) - } + } tmpfile2 <- tmpfile - attr(tmpfile2, "source") <- + attr(tmpfile2, "source") <- httr::headers(resp)$'x-amz-meta-x-imagery-sources' tmpfile2 }) } }) - + merged_elevation_grid <- merge_rasters(dem_list, target_prj = prj, tmp_dir = tmp_dir) sources <- unlist(lapply(dem_list, function(x) attr(x, "source"))) if(!is.null(sources)){ @@ -294,59 +295,59 @@ get_aws_terrain <- function(locations, z, prj, expand=NULL, sources <- strsplit(sources, "/") sources <- unlist(unique(lapply(sources, function(x) x[1]))) } - attr(merged_elevation_grid, "sources") <- + attr(merged_elevation_grid, "sources") <- paste(sources, collapse = ",") - + if(serial==FALSE){future::plan(future::sequential)} - - merged_elevation_grid + + merged_elevation_grid } #' Merge Rasters -#' -#' Merge multiple downloaded raster files into a single file. The input `target_prj` +#' +#' Merge multiple downloaded raster files into a single file. The input `target_prj` #' describes the projection for the new grid. -#' +#' #' @param raster_list a list of raster file paths to be mosaiced #' @param target_prj the target projection of the output raster -#' @param method the method for resampling/reprojecting. Default is 'bilinear'. 
+#' @param method the method for resampling/reprojecting. Default is 'bilinear'. #' Options can be found [here](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r) #' @param returnRaster if TRUE, return a raster object (default), else, return the file path to the object -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. #' @export #' @keywords internal - -merge_rasters <- function(raster_list, target_prj, method = "bilinear", + +merge_rasters <- function(raster_list, target_prj, method = "bilinear", returnRaster = TRUE, tmp_dir = tempdir()){ - + message(paste("Mosaicing & Projecting")) - + destfile <- tempfile(tmpdir = tmp_dir, fileext = ".tif") files <- unlist(raster_list) - + if(is.null(target_prj)){ r <- terra::rast(files[1]) target_prj <- terra::crs(r) } - - sf::gdal_utils(util = "warp", - source = files, + + sf::gdal_utils(util = "warp", + source = files, destination = destfile, options = c("-r", method) ) # Using two steps now as gdal with one step introduced NA's along seams # Slower but more accurate! destfile2 <- tempfile(tmpdir = tmp_dir, fileext = ".tif") - sf::gdal_utils(util = "warp", - source = destfile, + sf::gdal_utils(util = "warp", + source = destfile, destination = destfile2, options = c("-r", method, "-t_srs", sf::st_crs(target_prj)$wkt) ) - + if(returnRaster){ terra::rast(destfile2) } else { @@ -355,38 +356,35 @@ merge_rasters <- function(raster_list, target_prj, method = "bilinear", } #' Get a digital elevation model from the Open Topography SRTM Version 3 -#' -#' This function uses the Open Topography SRTM Version 3 files. -#' +#' +#' This function uses the Open Topography SRTM Version 3 files. +#' #' @source Attribution: Details here -#' -#' @param locations Either a \code{data.frame} of x (long) and y (lat), an -#' \code{sp}, an \code{sf}, or \code{raster} object as input. -#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} -#' object or a \code{terra} object is provided as the \code{locations}, -#' the prj is optional and will be taken from \code{locations}. This -#' argument is required for a \code{data.frame} of locations. +#' +#' @param locations Either a \code{data.frame} of x (long) and y (lat), an +#' \code{sp}, an \code{sf}, or \code{raster} object as input. +#' @inheritParams loc_check #' @param expand A numeric value of a distance, in map units, used to expand the -#' bounding box that is used to fetch the SRTM data. -#' @param tmp_dir The location to store downloaded raster files. Defaults to a -#' temporary location. Alternatively, the user may supply an -#' existing path for these raster files. New folders are not +#' bounding box that is used to fetch the SRTM data. +#' @param tmp_dir The location to store downloaded raster files. Defaults to a +#' temporary location. Alternatively, the user may supply an +#' existing path for these raster files. New folders are not #' created by \code{get_elev_raster}. -#' @param ... Extra configuration parameters to be passed to httr::GET. Common -#' usage is to adjust timeout. This is done as -#' \code{config=timeout(x)} where \code{x} is a numeric value in -#' seconds. 
Multiple configuration functions may be passed as a -#' vector. +#' @param ... Extra configuration parameters to be passed to httr::GET. Common +#' usage is to adjust timeout. This is done as +#' \code{config=timeout(x)} where \code{x} is a numeric value in +#' seconds. Multiple configuration functions may be passed as a +#' vector. #' @export #' @keywords internal get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), ...){ - + api_key <- get_opentopo_key() - + # Expand (if needed) and re-project bbx to ll_geo bbx <- proj_expand(locations,prj,expand) - + tmpfile <- tempfile(tmpdir = tmp_dir) base_url <- "https://portal.opentopography.org/API/globaldem?demtype=" data_set <- switch(src, @@ -394,7 +392,7 @@ get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), gl1 = "SRTMGL1", alos = "AW3D30", srtm15plus = "SRTM15Plus") - + url <- paste0(base_url, data_set, "&west=",min(bbx["xmin"]), "&south=",min(bbx["ymin"]), @@ -402,43 +400,43 @@ get_opentopo <- function(locations, src, prj, expand=NULL, tmp_dir = tempdir(), "&north=",max(bbx["ymax"]), "&outputFormat=GTiff", "&API_Key=", api_key) - + if(httr::http_error(url)) { message("The OpenTopography URL is invalid.") return(NULL) } - + message("Downloading OpenTopography DEMs") - resp <- httr::GET(url,httr::write_disk(tmpfile,overwrite=TRUE), - httr::user_agent("elevatr R package (https://github.com/jhollist/elevatr)"), + resp <- httr::GET(url,httr::write_disk(tmpfile,overwrite=TRUE), + httr::user_agent("elevatr R package (https://github.com/usepa/elevatr)"), httr::progress(), ...) message("") if (httr::http_type(resp) != "application/octet-stream") { stop("API did not return octet-stream as expected", call. = FALSE) - } + } dem <- merge_rasters(tmpfile, target_prj = prj, tmp_dir = tmp_dir) dem } #' Store OpenTopography Key -#' +#' #' This function stores an OpenTopgrapy key in a local .Renviron file. If the -#' .Renviron file exists, the key will be appended. This will typically only -#' need to be done once per machine. -#' -#' -#' @param key An OpenTopography API Key as a character. For details on obtaining an -#' OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}. +#' .Renviron file exists, the key will be appended. This will typically only +#' need to be done once per machine. +#' +#' +#' @param key An OpenTopography API Key as a character. For details on obtaining an +#' OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}. #' @export set_opentopo_key <- function(key){ home <- normalizePath("~/") - if(Sys.getenv("OPENTOPO_KEY")==""){ - cat(paste0("OPENTOPO_KEY=", key, "\n"), file = paste0(home, "/.Renviron"), + if(Sys.getenv("OPENTOPO_KEY")==""){ + cat(paste0("OPENTOPO_KEY=", key, "\n"), file = paste0(home, "/.Renviron"), append = TRUE) message("Your OpenTopography Key has been added to .Renviron. You will need to restart R for the changes to take effect.") } else { message("An existing OpenTopography Key already exists. 
To edit try usethis::edit_r_environ().") - } -} \ No newline at end of file + } +} diff --git a/R/internal.R b/R/internal.R index 0ec0391..f86621d 100644 --- a/R/internal.R +++ b/R/internal.R @@ -21,8 +21,8 @@ get_tilexy <- function(bbx,z){ max_tile <- unlist(slippymath::lonlat_to_tilenum(bbx["xmax"],bbx["ymax"],z)) x_all <- seq(from = floor(min_tile[1]), to = floor(max_tile[1])) y_all <- seq(from = floor(min_tile[2]), to = floor(max_tile[2])) - - + + if(z == 1){ x_all <- x_all[x_all<2] y_all <- y_all[y_all<2] @@ -30,123 +30,165 @@ get_tilexy <- function(bbx,z){ x_all <- x_all[x_all<1] y_all <- y_all[y_all<1] } - - + + return(expand.grid(x_all,y_all)) } - -#' function to check input type and projection. All input types convert to a -#' SpatialPointsDataFrame for point elevation and bbx for raster. +#' function sf, sfc, or sfg with LINESTRING geometry to POINT geometry +#' +#' Optionally use [sf::st_line_sample()] when n, density, or sample are +#' supplied. +#' +#' @param locations A sf, sfc, or sfg object with LINESTRING geometry. +#' @inheritParams sf::st_line_sample #' @keywords internal -loc_check <- function(locations, prj = NULL){ - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) +loc_linestring_to_point <- function( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL) { + stopifnot( + sf::st_is(locations, "LINESTRING") + ) + + if (is.numeric(c(n, density, sample))) { + locations <- sf::st_line_sample( + locations, + n = n, density = density, + sample = sample, + type = type + ) + } + + sf::st_cast(locations, to = "POINT") +} + +#' Get length of vector or nrow for data frame input +#' @noRd +loc_length <- function(locations) { + if (is.data.frame(locations)) { + return(nrow(locations)) } - - if(all(class(locations)=="data.frame")){ - if(is.null(prj) & !any(class(locations) %in% c("sf", "sfc", "sfg"))){ - stop("Please supply a valid sf crs via locations or prj.") + + length(locations) +} + +#' function to check and prepare input locations +#' +#' All input types convert to a sf data frame. +#' +#' @param prj A valid input to \code{\link{st_crs}} If a \code{sf} +#' object or a \code{terra} object is provided as the \code{locations}, +#' the prj is optional and will be taken from \code{locations}. This +#' argument is required for a \code{data.frame} of locations. +#' @inheritParams sf::st_as_sf +#' @param elev_col Elevation column name. 
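For a plain `data.frame`, `loc_check()` now respects the `coords` argument when building the `sf` object. A minimal sketch of that branch, with illustrative column names:

```r
library(sf)

df <- data.frame(lon = c(-72.81, -71.30), lat = c(44.54, 44.27))

# Equivalent of the data.frame branch in loc_check(): convert the coordinate
# columns named by `coords` and pre-allocate the elevation column.
pts <- st_as_sf(df, coords = c("lon", "lat"), crs = 4326)
pts$elevation <- vector("numeric", nrow(pts))
pts
```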
+#' @keywords internal +loc_check <- function(locations, + prj = NULL, + coords = c("x", "y"), + elev_col = "elevation") { + if (is.data.frame(locations) && !inherits(locations, "sf")) { + if (is.null(prj)) { + stop("Please supply a valid crs via locations or prj.") } - - locations <- sf::st_as_sf(x = locations, coords = c("x", "y"), crs = prj) - locations$elevation <- vector("numeric", nfeature) - - } else if(any(class(locations) %in% c("sf", "sfc", "sfg"))){ - + + stopifnot( + "`locations` must contain column names matching `coords`" = all(coords %in% names(locations)) + ) + + locations <- sf::st_as_sf(x = locations, coords = coords, crs = prj) + } else if (inherits(locations, c("sf", "sfc", "sfg"))) { sf_crs <- sf::st_crs(locations) - locations$elevation <- vector("numeric", nfeature) - - if((is.null(sf_crs) | is.na(sf_crs)) & is.null(prj)){ - stop("Please supply an sf object with a valid crs.") + if ((is.null(sf_crs) || is.na(sf_crs)) && is.null(prj)) { + stop("Please supply an sf or sfc object with a valid crs.") + } + + if (inherits(locations, "sfg")) { + locations <- sf::st_sfc(locations, crs = prj) + } + + if (!inherits(locations, "sf")) { + locations <- sf::st_as_sf(locations) } - - } else if(any(class(locations) %in% c("SpatRaster", "SpatVector"))){ - + + } else if (any(class(locations) %in% c("SpatRaster", "SpatVector"))) { sf_crs <- sf::st_crs(locations) - locations <- sf::st_as_sf(terra::as.points(locations, values = FALSE), - coords = terra::crds(locations, df = TRUE), - crs = sf_crs) - locations$elevation <- vector("numeric", nrow(locations)) - if((is.null(sf_crs) | is.na(sf_crs)) & is.null(prj)){ - stop("Please supply a valid sf crs via locations or prj.") + coords <- terra::crds(locations, df = TRUE) + + if (inherits(locations, "SpatVector")) { + locations <- terra::as.points(locations) + } else { + locations <- terra::as.points(locations, values = FALSE) + } + + locations <- sf::st_as_sf(locations, coords = coords, crs = sf_crs) + + if ((is.null(sf_crs) || is.na(sf_crs)) && is.null(prj)) { + stop("Please supply a valid crs via locations or prj.") } } - #check for long>180 - if(is.null(prj)){ - prj_test <- sf::st_crs(locations) + nfeature <- loc_length(locations) + locations[[elev_col]] <- vector("numeric", nfeature) + + # check for long>180 + if (!is.null(prj)) { + lll <- sf::st_is_longlat(prj) } else { - prj_test <- prj - } - - - lll <- sf::st_is_longlat(prj_test) - - if(lll){ - if(any(sf::st_coordinates(locations)[,1]>180)){ - stop("The elevatr package requires longitude in a range from -180 to 180.") - } + lll <- sf::st_is_longlat(locations) } - -locations -} + if (lll && any(sf::st_coordinates(locations)[, 1] > 180)) { + stop("The elevatr package requires longitude in a range from -180 to 180.") + } + locations +} #' function to project bounding box and if needed expand it #' @keywords internal -proj_expand <- function(locations,prj,expand){ - +proj_expand <- function(locations, prj, expand = NULL) { lll <- sf::st_is_longlat(prj) - #any(grepl("\\bGEOGCRS\\b",sf::st_crs(prj)) | - # grepl("\\bGEODCRS\\b", sf::st_crs(prj)) | - # grepl("\\bGEODETICCRS\\b", sf::st_crs(prj)) | - # grepl("\\bGEOGRAPHICCRS\\b", sf::st_crs(prj)) | - # grepl("\\blonglat\\b", sf::st_crs(prj)) | - # grepl("\\blatlong\\b", sf::st_crs(prj))) - - if(is.null(nrow(locations))){ - nfeature <- length(locations) - } else { - nfeature <- nrow(locations) - } - - if(any(sf::st_bbox(locations)[c("ymin","ymax")] == 0) & lll & is.null(expand)){ + nfeature <- loc_length(locations) + single_pt <- nfeature == 1 
&& is.null(expand) + bbx <- sf::st_bbox(locations) + + if (any(bbx[c("ymin","ymax")] == 0) && lll && is.null(expand)) { # Edge case for lat exactly at the equator - was returning NA expand <- 0.01 - } else if(nfeature == 1 & lll & is.null(expand)){ + } else if (single_pt && lll) { # Edge case for single point and lat long expand <- 0.01 - } else if(nfeature == 1 & is.null(expand)){ + } else if (single_pt) { # Edge case for single point and projected # set to 1000 meters unit <- sf::st_crs(sf::st_as_sf(locations), parameters = TRUE)$ud_unit - expand <- units::set_units(units::set_units(1000, "m"), unit, - mode = "standard") + + expand <- units::set_units( + units::set_units(1000, "m"), + unit, + mode = "standard" + ) expand <- as.numeric(expand) } - if(!is.null(expand)){ - - bbx <- sf::st_bbox(locations) + c(-expand, -expand, expand, expand) - } else { - bbx <- sf::st_bbox(locations) + bbx <- bbx + c(-expand, -expand, expand, expand) } - bbx <- bbox_to_sf(bbx, prj = prj) - bbx <- sf::st_bbox(sf::st_transform(bbx, crs = ll_geo)) + bbx_sf <- bbox_to_sf(bbx, prj = prj) + bbx <- sf::st_bbox(sf::st_transform(bbx_sf, crs = prj)) bbx_coord_check <- as.numeric(bbx) + if(any(!bbx_coord_check >= -180 & bbx_coord_check <= 360)){ stop("The elevatr package requires longitude in a range from -180 to 180.") - } + } bbx - + #sf expand - save for later #loc_sf <- sf::st_as_sf(locations) #loc_bbx <- sf::st_bbox(loc_sf) @@ -188,8 +230,8 @@ bbox_to_sf <- function(bbox, prj = 4326) { #' @param z zoom level if source is aws #' @keywords internal estimate_raster_size <- function(locations, prj, src, z = NULL){ - - locations <- bbox_to_sf(sf::st_bbox(locations), + + locations <- bbox_to_sf(sf::st_bbox(locations), prj = prj) locations <- sf::st_transform(locations, crs = 4326) @@ -199,14 +241,14 @@ estimate_raster_size <- function(locations, prj, src, z = NULL){ # Convert ground res to dd # zoom level 0 = 156543 meters 156543/111319.9 # old resolution (no idea how I calculated these...) - # c(0.54905236, 0.27452618, 0.15455633, 0.07145545, 0.03719130, 0.01901903, - # 0.00962056, 0.00483847, 0.00241219, 0.00120434, 0.00060173, 0.00030075, + # c(0.54905236, 0.27452618, 0.15455633, 0.07145545, 0.03719130, 0.01901903, + # 0.00962056, 0.00483847, 0.00241219, 0.00120434, 0.00060173, 0.00030075, # 0.00015035, 0.00007517, 0.00003758) - m_at_equator <- c(156543.0, 78271.5, 39135.8, 19567.9, 9783.9, 4892.0, 2446.0, - 1223.0, 611.5, 305.7, 152.9, 76.4, 38.2, 19.1, 9.6, 4.8, + m_at_equator <- c(156543.0, 78271.5, 39135.8, 19567.9, 9783.9, 4892.0, 2446.0, + 1223.0, 611.5, 305.7, 152.9, 76.4, 38.2, 19.1, 9.6, 4.8, 2.4) z_res <- data.frame(z = 0:16, res_dd = m_at_equator/111319.9) - + bits <- switch(src, aws = 32, gl3 = 32, @@ -220,20 +262,20 @@ estimate_raster_size <- function(locations, prj, src, z = NULL){ gl3 = 0.0008333, gl1 = 0.0002778, alos = 0.0002778, - srtm15plus = 0.004165) + srtm15plus = 0.004165) } num_rows <- (sf::st_bbox(locations)$xmax - sf::st_bbox(locations)$xmin)/res num_cols <- (sf::st_bbox(locations)$ymax - sf::st_bbox(locations)$ymin)/res - + num_megabytes <- (num_rows * num_cols * bits)/8388608 num_megabytes } #' OpenTopo Key -#' +#' #' The OpenTopography API now requires an API Key. 
This function will grab your #' key from an .Renviron file -#' +#' #' @keywords internal get_opentopo_key <- function(){ if(Sys.getenv("OPENTOPO_KEY")==""){ @@ -241,4 +283,72 @@ get_opentopo_key <- function(){ Please use elevatr::set_opentopo_key().") } Sys.getenv("OPENTOPO_KEY") -} \ No newline at end of file +} + +#' Make sf column the last column in a sf data frame +#' @keywords internal +relocate_sf_col_end <- function(x) { + cols <- c(setdiff(names(x), attr(x, "sf_column")), attr(x, "sf_column")) + x[,cols, drop = FALSE] +} + +#' Get distances between successive pairs of points +#' @inheritDotParams sf::st_distance -x -y +#' @keywords internal +st_point_distances <- function(x, + cumulative = TRUE, + units = NULL, + prj = sf::st_crs(x), + ...) { + stopifnot( + inherits(x, c("sfc", "sf")), + all(sf::st_is(x, "POINT")) + ) + + points <- x + + if (inherits(x, "sf")) { + points <- sf::st_geometry(x) + } + + point_list <- lapply( + points, + \(x) { + sf::st_sfc(x, crs = prj) + } + ) + + dist_points <- purrr::reduce( + seq_along(point_list), + \(x, y) { + if (y == length(point_list)) { + return(x) + } + + c( + x, + sf::st_distance( + x = point_list[[y]], + y = point_list[[y + 1]], + ... + ) + ) + }, + .init = 0 + ) + + if (cumulative) { + dist_points <- cumsum(dist_points) + } + + # TODO: Add handling for non character units values + if (!is.null(units)) { + dist_points <- units::set_units( + dist_points, + value = units, + mode = "standard" + ) + } + + dist_points +} diff --git a/man/get_aws_points.Rd b/man/get_aws_points.Rd index 2e8061c..a14361b 100644 --- a/man/get_aws_points.Rd +++ b/man/get_aws_points.Rd @@ -10,37 +10,39 @@ get_aws_points( units = c("meters", "feet"), ncpu = ifelse(future::availableCores() > 2, 2, 1), verbose = TRUE, + elev_col = "elevation", ... ) } \arguments{ -\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the -first column and y (e.g. latitude) as the second column, a -\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -\code{sf} \code{POINT} or \code{MULTIPOINT} object. -Elevation for these points will be returned in the +\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the +first column and y (e.g. latitude) as the second column, a +\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +\code{sf} \code{POINT} or \code{MULTIPOINT} object. +Elevation for these points will be returned in the originally supplied class.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at -\url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at +\url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. default value is 5 is supplied.} -\item{units}{Character string of either meters or feet. Conversions for -'aws' are handled in R as the AWS terrain tiles are served in +\item{units}{Character string of either meters or feet. Conversions for +'aws' are handled in R as the AWS terrain tiles are served in meters.} -\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +\item{ncpu}{Number of CPU's to use when downloading aws tiles. 
Defaults to 2 if more than two available, 1 otherwise.} -\item{verbose}{Report back messages.} +\item{verbose}{Toggles on and off the note about units and coordinate +reference system.} \item{...}{Arguments to be passed to \code{get_elev_raster}} } \value{ -a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with - elevation added to the data slot and a character of the elevation units +a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +elevation added to the data slot and a character of the elevation units } \description{ Function for accessing elevation data from AWS and extracting the elevations diff --git a/man/get_aws_terrain.Rd b/man/get_aws_terrain.Rd index 3bb8dc2..92f0308 100644 --- a/man/get_aws_terrain.Rd +++ b/man/get_aws_terrain.Rd @@ -4,10 +4,10 @@ \alias{get_aws_terrain} \title{Get a digital elevation model from the AWS Terrain Tiles} \source{ -Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 - content courtesy of the U.S. Geological Survey and ETOPO1 content - courtesy of U.S. National Oceanic and Atmospheric Administration. - \url{https://github.com/tilezen/joerd/tree/master/docs} +Attribution: Mapzen terrain tiles contain 3DEP, SRTM, and GMTED2010 +content courtesy of the U.S. Geological Survey and ETOPO1 content +courtesy of U.S. National Oceanic and Atmospheric Administration. +\url{https://github.com/tilezen/joerd/tree/master/docs} } \usage{ get_aws_terrain( @@ -22,44 +22,44 @@ get_aws_terrain( ) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sp}, \code{sf}, or \code{raster} object as input.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} \item{expand}{A numeric value of a distance, in map units, used to expand the -bounding box that is used to fetch the terrain tiles. This can -be used for features that fall close to the edge of a tile and +bounding box that is used to fetch the terrain tiles. This can +be used for features that fall close to the edge of a tile and additional area around the feature is desired. Default is NULL.} \item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 if more than two available, 1 otherwise.} -\item{serial}{Logical to determine if API should be hit in serial or in +\item{serial}{Logical to determine if API should be hit in serial or in parallel. TRUE will use purrr, FALSE will use furrr.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. 
New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{...}{Extra configuration parameters to be passed to httr::GET. Common -usage is to adjust timeout. This is done as -\code{config=timeout(x)} where \code{x} is a numeric value in -seconds. Multiple configuration functions may be passed as a +\item{...}{Extra configuration parameters to be passed to httr::GET. Common +usage is to adjust timeout. This is done as +\code{config=timeout(x)} where \code{x} is a numeric value in +seconds. Multiple configuration functions may be passed as a vector.} } \description{ This function uses the AWS Terrain Tile service to retrieve an elevation -raster from the geotiff service. It accepts a \code{sf::st_bbox} object as +raster from the geotiff service. It accepts a \code{sf::st_bbox} object as input and returns a single raster object covering that extent. } \keyword{internal} diff --git a/man/get_elev_point.Rd b/man/get_elev_point.Rd index fa5dc89..38441cd 100644 --- a/man/get_elev_point.Rd +++ b/man/get_elev_point.Rd @@ -9,55 +9,69 @@ get_elev_point( prj = NULL, src = c("epqs", "aws"), ncpu = ifelse(future::availableCores() > 2, 2, 1), + coords = c("x", "y"), overwrite = FALSE, - ... + ..., + units = c("meters", "feet"), + elev_col = "elevation", + elev_units_col = "elev_units" ) } \arguments{ -\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the -first column and y (e.g. latitude) as the second column, a -\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a -\code{sf} \code{POINT} or \code{MULTIPOINT} object. -Elevation for these points will be returned in the +\item{locations}{Either a \code{data.frame} with x (e.g. longitude) as the +first column and y (e.g. latitude) as the second column, a +\code{SpatialPoints}/\code{SpatialPointsDataFrame}, or a +\code{sf} \code{POINT} or \code{MULTIPOINT} object. +Elevation for these points will be returned in the originally supplied class.} -\item{prj}{A valid input to \code{\link{st_crs}}. This +\item{prj}{A valid input to \code{\link{st_crs}}. This argument is required for a \code{data.frame} of locations and optional for \code{sf} locations.} -\item{src}{A character indicating which API to use, either "epqs" or "aws" -accepted. The "epqs" source is relatively slow for larger numbers -of points (e.g. > 500). The "aws" source may be quicker in these -cases provided the points are in a similar geographic area. The +\item{src}{A character indicating which API to use, either "epqs" or "aws" +accepted. The "epqs" source is relatively slow for larger numbers +of points (e.g. > 500). The "aws" source may be quicker in these +cases provided the points are in a similar geographic area. The "aws" source downloads a DEM using \code{get_elev_raster} and then extracts the elevation for each point.} -\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 if more than two available, 1 otherwise.} -\item{overwrite}{A logical indicating that existing \code{elevation} and -\code{elev_units} columns should be overwritten. 
Default is -FALSE and \code{get_elev_point} will error if these columns +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + +\item{overwrite}{A logical indicating that existing \code{elevation} and +\code{elev_units} columns should be overwritten. Default is +FALSE and \code{get_elev_point} will error if these columns already exist.} -\item{...}{Additional arguments passed to get_epqs or get_aws_points. When -using "aws" as the source, pay attention to the `z` argument. A -defualt of 5 is used, but this uses a raster with a large ~4-5 km -pixel. Additionally, the source data changes as zoom levels -increase. -Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +\item{...}{Additional arguments passed to get_epqs or get_aws_points. When +using "aws" as the source, pay attention to the \code{z} argument. A +default of 5 is used, but this uses a raster with a large ~4-5 km +pixel. Additionally, the source data changes as zoom levels +increase. +Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} for details.} + +\item{units}{Default: \code{c("meters", "feet")}. Set to "meters" by default. Any +character string that is a valid distance unit supported by +\code{units::set_units()}.} + +\item{elev_col}{Elevation column name.} + +\item{elev_units_col}{Elevation units column.} } \value{ -Function returns an \code{sf} object in the projection specified by - the \code{prj} argument. +Function returns an \code{sf} object in the projection specified by +the \code{prj} argument. } \description{ -This function provides access to point elevations using either the USGS -Elevation Point Query Service (US Only) or by extracting point elevations -from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x -(long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as -input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with +This function provides access to point elevations using either the USGS +Elevation Point Query Service (US Only) or by extracting point elevations +from the AWS Terrain Tiles. The function accepts a \code{data.frame} of x +(long) and y (lat) or a \code{sf} \code{POINT} or \code{MULTIPOINT} object as +input. A \code{sf} \code{POINT} or \code{MULTIPOINT} object is returned with elevation and elevation units as an added \code{data.frame}. } \examples{ @@ -66,8 +80,8 @@ library(elevatr) library(sf) library(terra) -mts <- data.frame(x = c(-71.3036, -72.8145), - y = c(44.2700, 44.5438), +mts <- data.frame(x = c(-71.3036, -72.8145), + y = c(44.2700, 44.5438), names = c("Mt. Washington", "Mt. Mansfield")) ll_prj <- 4326 mts_sf <- st_as_sf(x = mts, coords = c("x", "y"), crs = ll_prj) @@ -84,13 +98,13 @@ get_elev_point(locations = mts_raster_loc) # Code to split into a loop and grab point at a time. 
-# This is usually faster for points that are spread apart - +# This is usually faster for points that are spread apart + library(dplyr) elev <- vector("numeric", length = nrow(mts)) for(i in seq_along(mts)){ -elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", +elev[i]<-get_elev_point(locations = mts[i,], prj = ll_prj, src = "aws", z = 10)$elevation} mts_elev <- cbind(mts, elev) mts_elev diff --git a/man/get_elev_profile.Rd b/man/get_elev_profile.Rd new file mode 100644 index 0000000..f4aba7d --- /dev/null +++ b/man/get_elev_profile.Rd @@ -0,0 +1,107 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_elev_profile.R +\name{get_elev_profile} +\alias{get_elev_profile} +\title{Get Point Elevation along a Profile Line} +\usage{ +get_elev_profile( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL, + units = NULL, + include = c("default", "dist", "cumdist"), + ..., + prj = NULL, + overwrite = FALSE, + coords = c("x", "y"), + elev_col = "elevation", + elev_units_col = "elev_units", + dist_col = "distance" +) +} +\arguments{ +\item{locations}{A sf, sfc, or sfg object with LINESTRING geometry.} + +\item{n}{integer; number of points to choose per geometry; if missing, n will be computed as \code{round(density * st_length(geom))}.} + +\item{density}{numeric; density (points per distance unit) of the sampling, possibly a vector of length equal to the number of features (otherwise recycled); \code{density} may be of class \code{units}.} + +\item{type}{character; indicate the sampling type, either "regular" or "random"} + +\item{sample}{numeric; a vector of numbers between 0 and 1 indicating the points to sample - if defined sample overrules n, density and type.} + +\item{units}{Default: \code{c("meters", "feet")}. Set to "meters" by default. Any +character string that is a valid distance unit supported by +\code{units::set_units()}.} + +\item{include}{Option of columns to include: one of "default", "dist", or +"cumdist". Default value returns same columns as \code{\link[=get_elev_point]{get_elev_point()}}. If +\code{include = "dist"}, the returned locations include the distance between +each successive pair of points. If \code{include = "cumdist"}, the distances are +provided as a cumulative sum.} + +\item{...}{Additional arguments passed to get_epqs or get_aws_points. When +using "aws" as the source, pay attention to the \code{z} argument. A +default of 5 is used, but this uses a raster with a large ~4-5 km +pixel. Additionally, the source data changes as zoom levels +increase. +Read \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution} +for details.} + +\item{prj}{A valid input to \code{\link{st_crs}}. This +argument is required for a \code{data.frame} of locations and optional +for \code{sf} locations.} + +\item{overwrite}{A logical indicating that existing \code{elevation} and +\code{elev_units} columns should be overwritten. Default is +FALSE and \code{get_elev_point} will error if these columns +already exist.} + +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + +\item{elev_col}{Elevation column name.} + +\item{elev_units_col}{Elevation units column.} + +\item{dist_col}{Column name to use for optional distance column. 
Only used if +\code{include} is set to \code{"dist"} or \code{"cumdist"}.} +} +\description{ +\code{\link[=get_elev_profile]{get_elev_profile()}} allows users to provide LINESTRING inputs to +\code{\link[sf:st_line_sample]{sf::st_line_sample()}} or to cast LINESTRING to POINT before calling +\code{\link[=get_elev_point]{get_elev_point()}} to get the point elevations. The function allows users to +get elevation along a profile line and, optionally, include a distance or +cumulative distance column in the output sf data frame. +} +\examples{ +\dontrun{ +library(sf) +library(elevatr) + +nc <- st_read(system.file("shape/nc.shp", package = "sf")) |> + st_transform(3857) + +nc_line <- suppressWarnings( + st_cast( + st_union( + st_centroid(nc[1, ]), + st_centroid(nc[2, ]) + ), + to = "LINESTRING" + ) +) + +elev_point <- get_elev_profile( + nc_line, + units = "ft", + dist = TRUE, + cumulative = TRUE, + n = 10 +) + +elev_point + +} +} diff --git a/man/get_elev_raster.Rd b/man/get_elev_raster.Rd index 85fec22..bd0ac6b 100644 --- a/man/get_elev_raster.Rd +++ b/man/get_elev_raster.Rd @@ -16,95 +16,100 @@ get_elev_raster( override_size_check = FALSE, tmp_dir = tempdir(), ncpu = ifelse(future::availableCores() > 2, 2, 1), + coords = c("x", "y"), ... ) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sf}, or \code{terra} object as input.} \item{z}{The zoom level to return. The zoom ranges from 1 to 14. Resolution -of the resultant raster is determined by the zoom and latitude. For -details on zoom and resolution see the documentation from Mapzen at +of the resultant raster is determined by the zoom and latitude. For +details on zoom and resolution see the documentation from Mapzen at \url{https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution}. The z is not required for the OpenTopography data sources.} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} -\item{src}{A character indicating which API to use. Currently supports "aws" -and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global +\item{src}{A character indicating which API to use. Currently supports "aws" +and "gl3", "gl1", "alos", or "srtm15plus" from the OpenTopography API global datasets. "aws" is the default.} \item{expand}{A numeric value of a distance, in map units, used to expand the -bounding box that is used to fetch the terrain tiles. This can -be used for features that fall close to the edge of a tile or -for retrieving additional area around the feature. If the -feature is a single point, the area it returns will be small if +bounding box that is used to fetch the terrain tiles. This can +be used for features that fall close to the edge of a tile or +for retrieving additional area around the feature. If the +feature is a single point, the area it returns will be small if clip is set to "bbox". Default is NULL.} \item{clip}{A character value used to determine clipping of returned DEM. -The default value is "tile" which returns the full tiles. 
Other -options are "bbox" which returns the DEM clipped to the bounding -box of the original locations (or expanded bounding box if used), -or "locations" if the spatial data (e.g. polygons) in the input -locations should be used to clip the DEM. Locations are not used +The default value is "tile" which returns the full tiles. Other +options are "bbox" which returns the DEM clipped to the bounding +box of the original locations (or expanded bounding box if used), +or "locations" if the spatial data (e.g. polygons) in the input +locations should be used to clip the DEM. Locations are not used to clip input point datasets. Instead the bounding box is used.} -\item{verbose}{Toggles on and off the note about units and coordinate +\item{verbose}{Toggles on and off the note about units and coordinate reference system.} -\item{neg_to_na}{Some of the data sources return large negative numbers as -missing data. When the end result is a projected those -large negative numbers can vary. When set to TRUE, only +\item{neg_to_na}{Some of the data sources return large negative numbers as +missing data. When the end result is a projected those +large negative numbers can vary. When set to TRUE, only zero and positive values are returned. Default is FALSE.} -\item{override_size_check}{Boolean to override size checks. Any download +\item{override_size_check}{Boolean to override size checks. Any download between 100 Mb and 500Mb report a message but -continue. Between 500Mb and 3000Mb requires +continue. Between 500Mb and 3000Mb requires interaction and greater than 3000Mb fails. These can be overriden with this argument set to TRUE.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} -\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 +\item{ncpu}{Number of CPU's to use when downloading aws tiles. Defaults to 2 if more than two available, 1 otherwise.} -\item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, +\item{coords}{Coordinate column names passed to \code{\link[sf:st_as_sf]{sf::st_as_sf()}}. Defaults to +\code{c("x", "y")}.} + +\item{...}{Extra arguments to pass to \code{httr::GET} via a named vector, \code{config}. See \code{\link{get_aws_terrain}} for more details.} } \value{ -Function returns a \code{RasterLayer} in the projection - specified by the \code{prj} argument or in the projection of the - provided locations. In subsequent versions, a \code{SpatRaster} - will be returned. +Function returns a \code{RasterLayer} in the projection +specified by the \code{prj} argument or in the projection of the +provided locations. In subsequent versions, a \code{SpatRaster} +will be returned. } \description{ -Several web services provide access to raster elevation. Currently, this -function provides access to the Amazon Web Services Terrain Tiles and the -Open Topography global datasets API. The function accepts a \code{data.frame} -of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A +Several web services provide access to raster elevation. Currently, this +function provides access to the Amazon Web Services Terrain Tiles and the +Open Topography global datasets API. 
The function accepts a \code{data.frame} +of x (long) and y (lat), an \code{sf}, or \code{terra} object as input. A \code{RasterLayer} object is returned. In subsequent versions, a \code{SpatRaster} will be returned. } \details{ -Currently, the \code{get_elev_raster} function utilizes the - Amazon Web Services - (\url{https://registry.opendata.aws/terrain-tiles/}) terrain - tiles and the Open Topography Global Datasets API - (\url{https://opentopography.org/developers}). - - The AWS Terrain Tiles data is provided via x, y, and z tiles (see - \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for - details.) The x and y are determined from the bounding box of the - object submitted for \code{locations} argument, and the z argument - must be specified by the user. +Currently, the \code{get_elev_raster} function utilizes the +Amazon Web Services +(\url{https://registry.opendata.aws/terrain-tiles/}) terrain +tiles and the Open Topography Global Datasets API +(\url{https://opentopography.org/developers}). + +\if{html}{\out{
}}\preformatted{ The AWS Terrain Tiles data is provided via x, y, and z tiles (see + \url{https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames} for + details.) The x and y are determined from the bounding box of the + object submitted for \code{locations} argument, and the z argument + must be specified by the user. +}\if{html}{\out{
}} } \examples{ \dontrun{ @@ -112,11 +117,11 @@ library(elevatr) library(sf) data(lake) lake_buff <- st_buffer(lake, 1000) -loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, +loc_df <- data.frame(x = runif(6,min=sf::st_bbox(lake)$xmin, max=sf::st_bbox(lake)$xmax), - y = runif(6,min=sf::st_bbox(lake)$ymin, + y = runif(6,min=sf::st_bbox(lake)$ymin, max=sf::st_bbox(lake)$ymax)) - + x <- get_elev_raster(locations = loc_df, prj = st_crs(lake) , z=10) x <- get_elev_raster(lake, z = 14) x <- get_elev_raster(lake, src = "gl3", expand = 5000) diff --git a/man/get_epqs.Rd b/man/get_epqs.Rd index 580a173..7b52e34 100644 --- a/man/get_epqs.Rd +++ b/man/get_epqs.Rd @@ -8,25 +8,26 @@ get_epqs( locations, units = c("meters", "feet"), ncpu = ifelse(future::availableCores() > 2, 2, 1), - serial = NULL + serial = NULL, + elev_col = "elevation" ) } \arguments{ -\item{locations}{A SpatialPointsDataFrame of the location(s) for which you -wish to return elevation. The first column is Longitude and +\item{locations}{A SpatialPointsDataFrame of the location(s) for which you +wish to return elevation. The first column is Longitude and the second column is Latitude.} -\item{units}{Character string of either meters or feet. Conversions for +\item{units}{Character string of either meters or feet. Conversions for 'epqs' are handled by the API itself.} \item{ncpu}{Number of CPU's to use when downloading epqs data.} -\item{serial}{Logical to determine if API should be hit in serial or in +\item{serial}{Logical to determine if API should be hit in serial or in parallel. TRUE will use purrr, FALSE will use furrr.} } \value{ -a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with - elevation added to the data slot and a character of the elevation units +a list with a SpatialPointsDataFrame or sf POINT or MULTIPOINT object with +elevation added to the data slot and a character of the elevation units } \description{ Function for accessing elevation data from the USGS epqs diff --git a/man/get_opentopo.Rd b/man/get_opentopo.Rd index cb221ff..65c30f6 100644 --- a/man/get_opentopo.Rd +++ b/man/get_opentopo.Rd @@ -10,26 +10,26 @@ Attribution: Details here get_opentopo(locations, src, prj, expand = NULL, tmp_dir = tempdir(), ...) } \arguments{ -\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an +\item{locations}{Either a \code{data.frame} of x (long) and y (lat), an \code{sp}, an \code{sf}, or \code{raster} object as input.} -\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} -object or a \code{terra} object is provided as the \code{locations}, -the prj is optional and will be taken from \code{locations}. This +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This argument is required for a \code{data.frame} of locations.} \item{expand}{A numeric value of a distance, in map units, used to expand the bounding box that is used to fetch the SRTM data.} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. 
New folders are not created by \code{get_elev_raster}.} -\item{...}{Extra configuration parameters to be passed to httr::GET. Common -usage is to adjust timeout. This is done as -\code{config=timeout(x)} where \code{x} is a numeric value in -seconds. Multiple configuration functions may be passed as a +\item{...}{Extra configuration parameters to be passed to httr::GET. Common +usage is to adjust timeout. This is done as +\code{config=timeout(x)} where \code{x} is a numeric value in +seconds. Multiple configuration functions may be passed as a vector.} } \description{ diff --git a/man/lake.Rd b/man/lake.Rd index 211b3ad..3630578 100644 --- a/man/lake.Rd +++ b/man/lake.Rd @@ -8,7 +8,7 @@ SpatialPolygonDataframe with 1 lakes, each with 13 variables } \description{ -This example data is a SpatialPolygonsDataFrame +This example data is a SpatialPolygonsDataFrame of a single lake, Lake Sunapee. Used for examples and tests. } \keyword{datasets} diff --git a/man/loc_check.Rd b/man/loc_check.Rd index 1146d27..899d4bb 100644 --- a/man/loc_check.Rd +++ b/man/loc_check.Rd @@ -2,13 +2,21 @@ % Please edit documentation in R/internal.R \name{loc_check} \alias{loc_check} -\title{function to check input type and projection. All input types convert to a -SpatialPointsDataFrame for point elevation and bbx for raster.} +\title{function to check and prepare input locations} \usage{ -loc_check(locations, prj = NULL) +loc_check(locations, prj = NULL, coords = c("x", "y"), elev_col = "elevation") +} +\arguments{ +\item{prj}{A valid input to \code{\link{st_crs}} If a \code{sf} +object or a \code{terra} object is provided as the \code{locations}, +the prj is optional and will be taken from \code{locations}. This +argument is required for a \code{data.frame} of locations.} + +\item{coords}{in case of point data: names or numbers of the numeric columns holding coordinates} + +\item{elev_col}{Elevation column name.} } \description{ -function to check input type and projection. All input types convert to a -SpatialPointsDataFrame for point elevation and bbx for raster. +All input types convert to a sf data frame. } \keyword{internal} diff --git a/man/loc_linestring_to_point.Rd b/man/loc_linestring_to_point.Rd new file mode 100644 index 0000000..d0ae27b --- /dev/null +++ b/man/loc_linestring_to_point.Rd @@ -0,0 +1,30 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{loc_linestring_to_point} +\alias{loc_linestring_to_point} +\title{function sf, sfc, or sfg with LINESTRING geometry to POINT geometry} +\usage{ +loc_linestring_to_point( + locations, + n = NULL, + density = NULL, + type = "regular", + sample = NULL +) +} +\arguments{ +\item{locations}{A sf, sfc, or sfg object with LINESTRING geometry.} + +\item{n}{integer; number of points to choose per geometry; if missing, n will be computed as \code{round(density * st_length(geom))}.} + +\item{density}{numeric; density (points per distance unit) of the sampling, possibly a vector of length equal to the number of features (otherwise recycled); \code{density} may be of class \code{units}.} + +\item{type}{character; indicate the sampling type, either "regular" or "random"} + +\item{sample}{numeric; a vector of numbers between 0 and 1 indicating the points to sample - if defined sample overrules n, density and type.} +} +\description{ +Optionally use \code{\link[sf:st_line_sample]{sf::st_line_sample()}} when n, density, or sample are +supplied. 
+} +\keyword{internal} diff --git a/man/merge_rasters.Rd b/man/merge_rasters.Rd index 688d418..b7c2f01 100644 --- a/man/merge_rasters.Rd +++ b/man/merge_rasters.Rd @@ -17,18 +17,18 @@ merge_rasters( \item{target_prj}{the target projection of the output raster} -\item{method}{the method for resampling/reprojecting. Default is 'bilinear'. -Options can be found [here](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r)} +\item{method}{the method for resampling/reprojecting. Default is 'bilinear'. +Options can be found \href{https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r}{here}} \item{returnRaster}{if TRUE, return a raster object (default), else, return the file path to the object} -\item{tmp_dir}{The location to store downloaded raster files. Defaults to a -temporary location. Alternatively, the user may supply an -existing path for these raster files. New folders are not +\item{tmp_dir}{The location to store downloaded raster files. Defaults to a +temporary location. Alternatively, the user may supply an +existing path for these raster files. New folders are not created by \code{get_elev_raster}.} } \description{ -Merge multiple downloaded raster files into a single file. The input `target_prj` +Merge multiple downloaded raster files into a single file. The input \code{target_prj} describes the projection for the new grid. } \keyword{internal} diff --git a/man/proj_expand.Rd b/man/proj_expand.Rd index c7fb804..9b27f4b 100644 --- a/man/proj_expand.Rd +++ b/man/proj_expand.Rd @@ -4,7 +4,7 @@ \alias{proj_expand} \title{function to project bounding box and if needed expand it} \usage{ -proj_expand(locations, prj, expand) +proj_expand(locations, prj, expand = NULL) } \description{ function to project bounding box and if needed expand it diff --git a/man/relocate_sf_col_end.Rd b/man/relocate_sf_col_end.Rd new file mode 100644 index 0000000..060e75f --- /dev/null +++ b/man/relocate_sf_col_end.Rd @@ -0,0 +1,12 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{relocate_sf_col_end} +\alias{relocate_sf_col_end} +\title{Make sf column the last column in a sf data frame} +\usage{ +relocate_sf_col_end(x) +} +\description{ +Make sf column the last column in a sf data frame +} +\keyword{internal} diff --git a/man/set_opentopo_key.Rd b/man/set_opentopo_key.Rd index d6fe684..d4a7abe 100644 --- a/man/set_opentopo_key.Rd +++ b/man/set_opentopo_key.Rd @@ -7,11 +7,11 @@ set_opentopo_key(key) } \arguments{ -\item{key}{An OpenTopography API Key as a character. For details on obtaining an +\item{key}{An OpenTopography API Key as a character. For details on obtaining an OpenTopgraphy key see \url{https://opentopography.org/blog/introducing-api-keys-access-opentopography-global-datasets}.} } \description{ This function stores an OpenTopgrapy key in a local .Renviron file. If the -.Renviron file exists, the key will be appended. This will typically only +.Renviron file exists, the key will be appended. This will typically only need to be done once per machine. 
} diff --git a/man/sf_big.Rd b/man/sf_big.Rd index 375d13a..433d77a 100644 --- a/man/sf_big.Rd +++ b/man/sf_big.Rd @@ -8,7 +8,7 @@ A sf POINT object } \description{ -This sf POINT dataset is 250 uniform random points to be used for +This sf POINT dataset is 250 uniform random points to be used for examples and tests } \keyword{datasets} diff --git a/man/st_point_distances.Rd b/man/st_point_distances.Rd new file mode 100644 index 0000000..a581e97 --- /dev/null +++ b/man/st_point_distances.Rd @@ -0,0 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/internal.R +\name{st_point_distances} +\alias{st_point_distances} +\title{Get distances between successive pairs of points} +\usage{ +st_point_distances( + x, + cumulative = TRUE, + units = NULL, + prj = sf::st_crs(x), + ... +) +} +\arguments{ +\item{...}{ + Arguments passed on to \code{\link[sf:geos_measures]{sf::st_distance}} + \describe{ + \item{\code{dist_fun}}{deprecated} + \item{\code{by_element}}{logical; if \code{TRUE}, return a vector with distance between the first elements of \code{x} and \code{y}, the second, etc; an error is raised if \code{x} and \code{y} are not the same length. If \code{FALSE}, return the dense matrix with all pairwise distances.} + \item{\code{which}}{character; for Cartesian coordinates only: one of \code{Euclidean}, \code{Hausdorff} or \code{Frechet}; for geodetic coordinates, great circle distances are computed; see details} + \item{\code{par}}{for \code{which} equal to \code{Hausdorff} or \code{Frechet}, optionally use a value between 0 and 1 to densify the geometry} + \item{\code{tolerance}}{ignored if \code{st_is_longlat(x)} is \code{FALSE}; otherwise, if set to a positive value, the first distance smaller than \code{tolerance} will be returned, and true distance may be smaller; this may speed up computation. 
In meters, or a \code{units} object convertible to meters.} + }} +} +\description{ +Get distances between successive pairs of points +} +\keyword{internal} diff --git a/tests/testthat/test-internal.R b/tests/testthat/test-internal.R index 3d12fe8..074d6b5 100644 --- a/tests/testthat/test-internal.R +++ b/tests/testthat/test-internal.R @@ -11,8 +11,8 @@ ll_prj <- 4326 aea_prj <- 5072 sf_sm <- st_as_sf(pt_df, coords = c("x", "y"), crs = ll_prj) -sf_sm_prj <- st_transform(sf_sm, crs = aea_prj) -bad_sf <- st_as_sf(data.frame(x = 1000, y = 1000), coords = c("x", "y"), +sf_sm_prj <- st_transform(sf_sm, crs = aea_prj) +bad_sf <- st_as_sf(data.frame(x = 1000, y = 1000), coords = c("x", "y"), crs = ll_prj) rast <- terra::rasterize(st_coordinates(sf_sm),terra::rast(sf_sm)) sf_sm_na <- sf_sm @@ -26,24 +26,24 @@ mts <- rbind(mt_wash,mt_mans) mts$name <- c("Mount Washington", "Mount Mansfield") test_that("data frame with more extra columns work", { - + mts_with_names_and_elevation <- get_elev_point(mts, ll_prj) expect_true("name" %in% names(mts_with_names_and_elevation)) }) test_that("proj_expand works",{ - - mans_sf <- st_as_sf(data.frame(x = -72.8145, y = 44.5438), + + mans_sf <- st_as_sf(data.frame(x = -72.8145, y = 44.5438), coords = c("x","y"), crs = ll_prj) mans <- get_elev_raster(locations = mans_sf, z = 6) mans_exp <- get_elev_raster(locations = mans_sf, z = 6, expand = 2) - + rast_elev <- get_elev_raster(locations = rast, z = 5) rast_elev_exp <- get_elev_raster(locations = rast, z = 5, exp = 5) expect_gt(ncell(mans_exp),ncell(mans)) expect_gt(ncell(rast_elev_exp), ncell(rast_elev)) - + origin_sf <- st_as_sf(data.frame(x = 0, y = 0), coords = c("x", "y"), crs = ll_prj) origins <- get_elev_raster(locations = origin_sf, z = 6) @@ -53,19 +53,19 @@ test_that("proj_expand works",{ test_that("loc_check errors correctly", { empty_rast <- rast(nrow = 1, ncol =1) - expect_error(get_elev_point(locations = pt_df), - "Please supply a valid sf crs via locations or prj.") + expect_error(get_elev_point(locations = pt_df), + "Please supply a valid crs via locations or prj.") expect_error(get_elev_point(locations = rast_na), - "Please supply a valid sf crs via locations or prj.") + "Please supply a valid crs via locations or prj.") expect_error(get_elev_point(locations = sf_sm_na), - "Please supply an sf object with a valid crs.") + "Please supply an sf or sfc object with a valid crs.") }) test_that("Z of 1 or 0 works in get_tilexy",{ sf_sm_1 <- get_elev_raster(sf_sm, z = 1, clip = "bbox") sf_sm_0 <- get_elev_raster(sf_sm, z = 0, clip = "bbox") - + expect_gt(max(res(sf_sm_1)), 0.27) expect_gt(max(res(sf_sm_0)), 0.54) }) diff --git a/vignettes/introduction_to_elevatr.Rmd b/vignettes/introduction_to_elevatr.Rmd index 02abe09..c2b94d4 100644 --- a/vignettes/introduction_to_elevatr.Rmd +++ b/vignettes/introduction_to_elevatr.Rmd @@ -209,4 +209,4 @@ Below is an example for grabbing the OpenTopography SRTM data. lake_srtmgl1 <- get_elev_raster(lake, src = "gl1", clip = "bbox", expand = 1000) plot(lake_srtmgl1) plot(st_geometry(lake), add = TRUE, col = "blue") -``` \ No newline at end of file +```
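
For readers trying out this change set, the snippet below is a minimal usage sketch of the new `get_elev_profile()` function. It is not part of the diff: the coordinates are illustrative, and the argument names (`n`, `include`, `units`) and the default `distance`/`elevation` output columns are taken from the usage and arguments blocks in `man/get_elev_profile.Rd` above, so treat them as assumptions rather than a verified interface. The call uses the default EPQS point-elevation source, so it needs network access and US coordinates.

```r
# Minimal sketch (interface assumed from man/get_elev_profile.Rd); requires
# network access to the default USGS EPQS point-elevation service.
library(sf)
library(elevatr)

# Illustrative LINESTRING near Mt. Mansfield, VT (WGS84 longitude/latitude)
profile_line <- st_sfc(
  st_linestring(rbind(c(-72.84, 44.53), c(-72.78, 44.55))),
  crs = 4326
)

# Sample 25 points along the line, fetch point elevations, and return the
# cumulative distance along the profile in the default "distance" column
profile_pts <- get_elev_profile(
  profile_line,
  n = 25,
  include = "cumdist",
  units = "meters"
)

# Plot the elevation profile
plot(profile_pts$distance, profile_pts$elevation, type = "l",
     xlab = "Cumulative distance (m)", ylab = "Elevation (m)")
```

Per the documentation added in this diff, `include = "cumdist"` reports distances as a cumulative sum (the internal `st_point_distances()` helper accumulates successive pairwise distances with `cumsum()`), while `include = "dist"` reports the distance between each successive pair of sampled points.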