Skip to content

Commit

Permalink
Merge pull request #1278 from M3nin0/reg-bdc-tiles
Browse files Browse the repository at this point in the history
review dependencies, merge and url parser
  • Loading branch information
M3nin0 authored Jan 31, 2025
2 parents c3c2253 + 5504856 commit e817c17
Show file tree
Hide file tree
Showing 12 changed files with 107 additions and 40 deletions.
3 changes: 0 additions & 3 deletions .github/workflows/R-CMD-check.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,6 @@ jobs:
extra-packages: any::rcmdcheck
needs: check

- name: Install tmap package from GitHub
run: Rscript -e "install.packages('remotes'); remotes::install_github('r-tmap/tmap')"

- uses: r-lib/actions/check-r-package@v2
with:
upload-snapshots: true
12 changes: 5 additions & 7 deletions DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -56,19 +56,21 @@ Imports:
dplyr (>= 1.1.0),
grDevices,
graphics,
leaflet (>= 2.2.0),
lubridate,
luz (>= 0.4.0),
parallel,
purrr (>= 1.0.2),
randomForest,
Rcpp (>= 1.0.13),
rstac (>= 1.0.1),
sf (>= 1.0-19),
showtext,
sysfonts,
slider (>= 0.2.0),
stats,
terra (>= 1.8-5),
tibble (>= 3.1),
tidyr (>= 1.3.0),
tmap (>= 3.9),
torch (>= 0.13.0),
units,
utils
Expand All @@ -88,17 +90,14 @@ Suggests:
gdalcubes (>= 0.7.0),
geojsonsf,
ggplot2,
httr2,
httr2 (>= 1.1.0),
jsonlite,
kohonen (>= 3.0.11),
leaflet (>= 2.2.0),
luz (>= 0.4.0),
methods,
mgcv,
nnet,
openxlsx,
proxy,
randomForest,
randomForestExplainer,
RColorBrewer,
RcppArmadillo (>= 0.12),
Expand All @@ -107,7 +106,6 @@ Suggests:
stringr,
supercells (>= 1.0.0),
testthat (>= 3.1.3),
tmap (>= 3.9),
tools,
xgboost
Config/testthat/edition: 3
Expand Down
1 change: 1 addition & 0 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,7 @@ S3method(.raster_yres,terra)
S3method(.reg_tile_convert,dem_cube)
S3method(.reg_tile_convert,grd_cube)
S3method(.reg_tile_convert,rainfall_cube)
S3method(.reg_tile_convert,raster_cube)
S3method(.reg_tile_convert,rtc_cube)
S3method(.request,httr2)
S3method(.request_check_package,httr2)
Expand Down
5 changes: 1 addition & 4 deletions R/api_check.R
Original file line number Diff line number Diff line change
Expand Up @@ -1541,10 +1541,7 @@
#' @return Called for side effects.
.check_cube_is_regular <- function(cube) {
    .check_set_caller(".check_cube_is_regular")
    # delegate the actual regularity test to the cube API; .check_that()
    # raises the caller-specific error message when the test fails
    is_regular <- .cube_is_regular(cube)
    .check_that(is_regular)
    invisible(TRUE)
}
#' @title Does the input data contain a sits accuracy object?
Expand Down
17 changes: 0 additions & 17 deletions R/api_conf.R
Original file line number Diff line number Diff line change
Expand Up @@ -367,23 +367,6 @@
.conf_colors <- function() {
return(sits_env[["color_table"]])
}
#' @title Configure fonts to be used
#' @name .conf_set_fonts
#' @keywords internal
#' @noRd
#' @description Registers the Google fonts used by sits plots and switches
#'   on automatic showtext rendering. Needs the optional packages
#'   \code{sysfonts} and \code{showtext}.
#' @return NULL, called for side effects
#'
.conf_set_fonts <- function() {
    # fail early if the optional font packages are not installed
    .check_require_packages("sysfonts")
    .check_require_packages("showtext")
    # render all subsequent plots with showtext
    showtext::showtext_auto()
    # Google fonts to register, keyed by the local family name
    google_fonts <- c(
        plex_sans = "IBM Plex Sans",
        roboto = "Roboto",
        lato = "Lato"
    )
    for (family in names(google_fonts)) {
        sysfonts::font_add_google(google_fonts[[family]], family = family)
    }
    NULL
}
#' @title Return the user configuration set in the environment variable
#' @name .conf_user_env_var
#' @keywords internal
Expand Down
25 changes: 24 additions & 1 deletion R/api_cube.R
Original file line number Diff line number Diff line change
Expand Up @@ -809,6 +809,29 @@ NULL
is_complete <- .cube_is_complete(cube)
return(is_complete)
}
#' @title Check that cube is regular
#' @name .cube_is_regular
#' @keywords internal
#' @noRd
#' @param cube datacube
#' @return TRUE if the cube is complete, has a unique bounding box and a
#'   unique tile size, and does not span multiple timelines;
#'   FALSE otherwise.
.cube_is_regular <- function(cube) {
    .check_set_caller(".cube_is_regular")
    # short-circuit: stop evaluating as soon as one condition fails,
    # instead of running every (potentially costly) check regardless
    # NOTE(review): a cube with an empty timeline passes here (only
    # length > 1 fails), matching the previous flag-based behavior --
    # confirm this is intended
    .cube_is_complete(cube) &&
        .cube_has_unique_bbox(cube) &&
        .cube_has_unique_tile_size(cube) &&
        length(.cube_timeline(cube)) <= 1
}

#' @title Check that cube is a base cube
#' @name .cube_is_base
Expand Down Expand Up @@ -1444,7 +1467,7 @@ NULL
# check that token is valid
.check_that(.has(res_content))
# parse token
token_parsed <- .url_parse(paste0("?", res_content[["token"]]))
token_parsed <- .url_parse_query(res_content[["token"]])
file_info[["path"]] <- purrr::map_chr(seq_along(fi_paths), function(i) {
path <- fi_paths[[i]]
if (are_local_paths[[i]]) {
Expand Down
54 changes: 54 additions & 0 deletions R/api_regularize.R
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,60 @@
UseMethod(".reg_tile_convert", cube)
}

#' @title Redistribute a raster cube into a target tiling grid system
#' @noRd
#' @param cube raster data cube whose images will be regrouped
#' @param grid_system name of the target grid system, forwarded to
#'   \code{.grid_filter_tiles()}
#' @param roi region of interest used to select grid tiles (optional)
#' @param tiles tile identifiers used to select grid tiles (optional)
#' @return a data cube with one row per intersecting grid tile, carrying
#'   the original cube classes
#' @export
.reg_tile_convert.raster_cube <- function(cube, grid_system, roi = NULL, tiles = NULL) {
    # if roi and tiles are not provided, use the whole cube as extent
    if (!.has(roi) && !.has(tiles)) {
        roi <- .cube_as_sf(cube)
    }

    # generate system grid tiles and intersect them with the roi
    tiles_filtered <- .grid_filter_tiles(
        grid_system = grid_system, tiles = tiles, roi = roi
    )

    # save original cube classes so they can be restored at the end
    # (the dplyr pipeline below returns a plain tibble)
    cube_class <- class(cube)

    # redistribute data into tiles: rowwise() makes each grid tile a
    # one-row group, so group_map() builds one cube row per tile
    cube <- tiles_filtered |>
        dplyr::rowwise() |>
        dplyr::group_map(~{
            # prepare a sf object representing the bbox of each image in
            # file_info (all tiles of the input cube pooled together)
            cube_fi <- dplyr::bind_rows(cube[["file_info"]])
            # extract bounding box from files, reprojected to the CRS of
            # the current grid tile (.x) so the intersection is valid
            fi_bbox <- .bbox_as_sf(.bbox(
                x = cube_fi,
                default_crs = cube,
                by_feature = TRUE
            ), as_crs = .x[["crs"]])
            # keep only the images whose footprint intersects this tile
            file_info <- cube_fi[.intersects(fi_bbox, .x), ]
            # assemble a one-row cube for this grid tile; metadata
            # (source, collection, satellite, sensor) is taken from the
            # input cube, extent and crs from the grid tile
            .cube_create(
                source = .tile_source(cube),
                collection = .tile_collection(cube),
                satellite = .tile_satellite(cube),
                sensor = .tile_sensor(cube),
                tile = .x[["tile_id"]],
                xmin = .xmin(.x),
                xmax = .xmax(.x),
                ymin = .ymin(.x),
                ymax = .ymax(.x),
                crs = .x[["crs"]],
                file_info = file_info
            )
        }) |>
        dplyr::bind_rows()

    # filter non-empty file info (tiles with no intersecting images)
    cube <- .cube_filter_nonempty(cube)

    # Finalize customizing cube class (restore the classes saved above)
    .cube_set_class(cube, cube_class)
}

#' @noRd
#' @export
.reg_tile_convert.grd_cube <- function(cube, grid_system, roi = NULL, tiles = NULL) {
Expand Down
13 changes: 13 additions & 0 deletions R/api_request_httr2.R
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,19 @@
httr2::url_parse(url)
}

#' @title Parse URL query string
#' @name .url_parse_query
#' @keywords internal
#' @noRd
#' @author Felipe Carvalho, \email{felipe.carvalho@@inpe.br}
#'
#' @param url A character with a URL query string
#'   (e.g. "a=1&b=2", without a leading "?").
#'
#' @return A named list with the parsed query parameters.
.url_parse_query <- function(url) {
    # url_query_parse() (httr2 >= 1.1.0, as required in DESCRIPTION)
    # parses a bare query string; url_parse() expects a full URL and
    # would misinterpret "a=1&b=2" as a path component
    httr2::url_query_parse(url)
}

#' @title Build an URL
#' @name .url_build
#' @keywords internal
Expand Down
12 changes: 7 additions & 5 deletions R/sits_regularize.R
Original file line number Diff line number Diff line change
Expand Up @@ -178,11 +178,13 @@ sits_regularize.raster_cube <- function(cube, ...,
# Convert input cube to the user's provided grid system
if (.has(grid_system)) {
.check_grid_system(grid_system)
cube <- .reg_tile_convert(
cube = cube,
grid_system = grid_system,
roi = roi,
tiles = tiles
cube <- suppressWarnings(
.reg_tile_convert(
cube = cube,
grid_system = grid_system,
roi = roi,
tiles = tiles
)
)
.check_that(nrow(cube) > 0,
msg = .conf("messages", "sits_regularize_roi")
Expand Down
2 changes: 0 additions & 2 deletions R/sits_tuning.R
Original file line number Diff line number Diff line change
Expand Up @@ -114,8 +114,6 @@ sits_tuning <- function(samples,
.tuning_pick_random,
params = params
)
# Update multicores
multicores <- .ml_update_multicores(ml_model, multicores)
# start processes
.parallel_start(workers = multicores)
on.exit(.parallel_stop())
Expand Down
3 changes: 2 additions & 1 deletion man/sits_classify.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Empty file removed src/normalize_data-22e72bf4.o.tmp
Empty file.

0 comments on commit e817c17

Please sign in to comment.