Commit 78cac6b

Merge branch 'dev' of https://github.com/e-sensing/sits into dev
2 parents: 8322d02 + 4137284

55 files changed (+387 -267 lines)

NAMESPACE  (+2)

@@ -58,6 +58,7 @@ S3method(.cube_is_local,raster_cube)
 S3method(.cube_is_token_expired,default)
 S3method(.cube_is_token_expired,mpc_cube)
 S3method(.cube_labels,default)
+S3method(.cube_labels,derived_cube)
 S3method(.cube_labels,raster_cube)
 S3method(.cube_labels,tbl_df)
 S3method(.cube_merge_tiles,default)
@@ -316,6 +317,7 @@ S3method(sits_apply,raster_cube)
 S3method(sits_apply,sits)
 S3method(sits_as_sf,raster_cube)
 S3method(sits_as_sf,sits)
+S3method(sits_bands,base_raster_cube)
 S3method(sits_bands,default)
 S3method(sits_bands,patterns)
 S3method(sits_bands,raster_cube)
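The two new registrations wire extra S3 methods into the cube API. A minimal sketch of how that dispatch behaves, using a simplified stand-in object (the real cubes are tibbles built by sits; the class vector, labels, and the raster_cube fallback body below are illustrative only):

```r
# simplified stand-in for the generic and the two methods registered above
.cube_labels <- function(cube, ...) UseMethod(".cube_labels", cube)

# matches the new method added in R/api_cube.R: derived cubes keep their
# labels in a list column, so the first element is returned
.cube_labels.derived_cube <- function(cube, ...) cube[["labels"]][[1]]

# simplified fallback for ordinary raster cubes (the real method slides
# over tiles with slider::slide and .tile_labels)
.cube_labels.raster_cube <- function(cube, ...) unique(unlist(cube[["labels"]]))

# hypothetical probs cube: the class vector mimics what sits assigns
probs_cube <- structure(
    list(labels = list(c("1" = "Forest", "2" = "Pasture"))),
    class = c("probs_cube", "derived_cube", "raster_cube", "list")
)
.cube_labels(probs_cube)  # dispatches to .cube_labels.derived_cube
```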

R/api_apply.R  (+1 -1)

@@ -197,7 +197,7 @@
     result <-
         .apply(data, col = "time_series", fn = function(x, ...) {
             dplyr::mutate(x, dplyr::across(
-                dplyr::matches(sits_bands(data)),
+                dplyr::matches(.samples_bands(data)),
                 fn, ...
             ))
         }, ...)
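The one-line change swaps the exported `sits_bands()` for the internal `.samples_bands()` accessor; the surrounding `dplyr::across(dplyr::matches(...))` idiom is unchanged. A minimal sketch of that idiom on a toy time-series tibble (band names and values are made up):

```r
library(dplyr)

ts <- tibble(
    Index = as.Date("2020-01-01") + c(0, 16, 32),
    NDVI  = c(0.61, 0.58, 0.65),
    EVI   = c(0.42, 0.40, 0.47)
)
bands <- c("NDVI", "EVI")  # stands in for what .samples_bands() returns

# apply the same function to every band column; the Index column is untouched
mutate(ts, across(matches(bands), \(x) x * 10000))
```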

R/api_band.R  (+2 -2)

@@ -15,7 +15,7 @@
 #' @return Updated sits object
 #' @export
 .band_rename.sits <- function(x, bands) {
-    data_bands <- sits_bands(x)
+    data_bands <- .samples_bands(x)
     # pre-condition
     .check_chr(
         bands,
@@ -42,7 +42,7 @@
 #' @return updated sits object
 #' @export
 .band_rename.raster_cube <- function(x, bands) {
-    data_bands <- sits_bands(x)
+    data_bands <- .cube_bands(x)
     # pre-condition
     .check_chr(
         bands,
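Both `.band_rename()` methods now read the current band names from class-specific internal accessors (`.samples_bands()` for sample tibbles, `.cube_bands()` for cubes) before validating the replacement names. As a hypothetical illustration only (column names and data are made up, and this is not the sits implementation), renaming bands in a sits-style tibble amounts to renaming the band columns nested inside the time_series list column:

```r
library(dplyr)
library(purrr)

# rename the band columns nested inside the time_series list column
rename_bands <- function(ts_tbl, old_bands, new_bands) {
    mutate(ts_tbl, time_series = map(time_series, function(ts) {
        rename_with(ts, ~ new_bands[match(.x, old_bands)], all_of(old_bands))
    }))
}

ts_tbl <- tibble(
    longitude = -55.2,
    latitude  = -11.5,
    time_series = list(
        tibble(Index = as.Date("2020-01-01"), B04 = 0.10, B08 = 0.32)
    )
)
rename_bands(ts_tbl, old_bands = c("B04", "B08"), new_bands = c("RED", "NIR"))
```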

R/api_check.R  (+15 -15)

@@ -1697,17 +1697,17 @@
     samples_validation <- .check_samples(samples_validation)
     # check if the labels matches with train data
     .check_that(
-        all(sits_labels(samples_validation) %in% labels) &&
-            all(labels %in% sits_labels(samples_validation))
+        all(.samples_labels(samples_validation) %in% labels) &&
+            all(labels %in% .samples_labels(samples_validation))
     )
     # check if the timeline matches with train data
     .check_that(
-        length(sits_timeline(samples_validation)) == length(timeline)
+        length(.samples_timeline(samples_validation)) == length(timeline)
     )
     # check if the bands matches with train data
     .check_that(
-        all(sits_bands(samples_validation) %in% bands) &&
-            all(bands %in% sits_bands(samples_validation))
+        all(.samples_bands(samples_validation) %in% bands) &&
+            all(bands %in% .samples_bands(samples_validation))
     )
     return(invisible(samples_validation))
 }
@@ -1763,8 +1763,8 @@
     cols <- .pred_cols # From predictors API
     .check_that(cols %in% colnames(pred))
     .check_that(nrow(pred) > 0)
-    n_bands <- length(sits_bands(samples))
-    n_times <- length(sits_timeline(samples))
+    n_bands <- length(.samples_bands(samples))
+    n_times <- length(.samples_timeline(samples))
     .check_that(ncol(pred) == 2 + n_bands * n_times)
     return(invisible(pred))
 }
@@ -1802,8 +1802,8 @@
 .check_samples_tile_match_timeline <- function(samples, tile) {
     .check_set_caller(".check_samples_tile_match_timeline")
     # do they have the same timelines?
-    samples_timeline_length <- length(sits_timeline(samples))
-    tiles_timeline_length <- length(sits_timeline(tile))
+    samples_timeline_length <- length(.samples_timeline(samples))
+    tiles_timeline_length <- length(.tile_timeline(tile))
     .check_that(samples_timeline_length == tiles_timeline_length)
     return(invisible(samples))
 }
@@ -1817,8 +1817,8 @@
 .check_samples_tile_match_bands <- function(samples, tile) {
     .check_set_caller(".check_samples_tile_match_bands")
     # do they have the same bands?
-    tile_bands <- sits_bands(tile)
-    bands <- sits_bands(samples)
+    tile_bands <- .tile_bands(tile)
+    bands <- .samples_bands(samples)
     .check_that(all(bands %in% tile_bands))
     return(invisible(samples))
 }
@@ -1855,7 +1855,7 @@
     })
     classes_num <- unique(unlist(classes_list))
     classes_num <- classes_num[!is.na(classes_num)]
-    labels_num <- names(sits_labels(cube))
+    labels_num <- names(.cube_labels(cube))
     # do the labels and raster numbers match?
     .check_that(all(classes_num %in% labels_num))
     return(invisible(cube))
@@ -1964,8 +1964,8 @@
 .check_cubes_same_labels <- function(cube1, cube2) {
     .check_set_caller(".check_cubes_same_labels")
     .check_that(
-        all(sits_labels(cube1) %in% sits_labels(cube2)) &&
-            all(sits_labels(cube2) %in% sits_labels(cube1))
+        all(.cube_labels(cube1) %in% .cube_labels(cube2)) &&
+            all(.cube_labels(cube2) %in% .cube_labels(cube1))
     )
     return(invisible(cube1))
 }
@@ -1978,7 +1978,7 @@
 #' @return Called for side effects.
 .check_cubes_same_timeline <- function(cube1, cube2) {
     .check_set_caller(".check_cubes_same_timeline")
-    .check_that(all(sits_timeline(cube1) == sits_timeline(cube2)))
+    .check_that(all(.cube_timeline(cube1)[[1]] == .cube_timeline(cube2)[[1]]))
     return(invisible(cube1))
 }
 #' @title Check if two cubes have the same organization
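The label checks above keep the two-way `%in%` pattern and only switch to internal accessors; the timeline check now takes `[[1]]` from `.cube_timeline()`, which suggests the internal accessor returns one timeline per tile (an assumption here, not shown in this diff). A small base-R sketch of both ideas with toy values:

```r
# two-way membership is a set-equality test, independent of order
labels_cube1 <- c("Forest", "Pasture", "Soy_Corn")
labels_cube2 <- c("Soy_Corn", "Forest", "Pasture")
all(labels_cube1 %in% labels_cube2) && all(labels_cube2 %in% labels_cube1)  # TRUE
setequal(labels_cube1, labels_cube2)                                        # same test

# assumed shape for .cube_timeline(): a list with one timeline per tile,
# so [[1]] picks the first tile's dates (toy dates below)
cube_timeline <- list(as.Date(c("2020-01-01", "2020-01-17", "2020-02-02")))
cube_timeline[[1]]
```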

R/api_combine_predictions.R  (+3 -3)

@@ -35,7 +35,7 @@
     job_memsize <- .jobs_memsize(
         job_size = .block_size(block = block_size),
         npaths = length(probs_cubes) * nrow(base_cube) *
-            length(sits_labels(base_cube)),
+            length(.cube_labels(base_cube)),
         nbytes = 8,
         proc_bloat = .conf("processing_bloat_cpu")
     )
@@ -223,7 +223,7 @@
     # Combine by average
     values <- weighted_probs(values, weights)
     # get the number of labels
-    n_labels <- length(sits_labels(cubes[[1]]))
+    n_labels <- length(.cube_labels(cubes[[1]]))
     # Are the results consistent with the data input?
     .check_processed_values(values, input_pixels)
     .check_processed_labels(values, n_labels)
@@ -248,7 +248,7 @@
     # Combine by average
     values <- weighted_uncert_probs(values, uncert_values)
     # get the number of labels
-    n_labels <- length(sits_labels(cubes[[1]]))
+    n_labels <- length(.cube_labels(cubes[[1]]))
     # Are the results consistent with the data input?
     .check_processed_values(values, input_pixels)
     .check_processed_labels(values, n_labels)
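`weighted_probs()` and `weighted_uncert_probs()` are internal helpers not shown in this diff; the change here only affects how the label count is obtained. For intuition, a minimal base-R sketch of a weighted average of two probability matrices (hypothetical values and weights, not the sits implementation):

```r
# probabilities for 3 pixels x 2 labels from two models (rows sum to 1)
probs_a <- matrix(c(0.8, 0.2,
                    0.6, 0.4,
                    0.3, 0.7), ncol = 2, byrow = TRUE)
probs_b <- matrix(c(0.7, 0.3,
                    0.4, 0.6,
                    0.2, 0.8), ncol = 2, byrow = TRUE)
weights <- c(0.6, 0.4)  # hypothetical model weights, summing to 1

combined <- weights[[1]] * probs_a + weights[[2]] * probs_b
rowSums(combined)  # each pixel still sums to 1
```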

R/api_cube.R  (+21 -4)

@@ -51,13 +51,13 @@ NULL
     } else {
         stop(.conf("messages", ".cube_find_class"))
     }
-    if (all(sits_bands(cube) %in% .conf("sits_probs_bands"))) {
+    if (all(.cube_bands(cube) %in% .conf("sits_probs_bands"))) {
         class(cube) <- c("probs_cube", "derived_cube", class(cube))
-    } else if (all(sits_bands(cube) == "class")) {
+    } else if (all(.cube_bands(cube) == "class")) {
         class(cube) <- c("class_cube", "derived_cube", class(cube))
-    } else if (all(sits_bands(cube) == "variance")) {
+    } else if (all(.cube_bands(cube) == "variance")) {
         class(cube) <- c("variance_cube", "derived_cube", class(cube))
-    } else if (all(sits_bands(cube) %in% .conf("sits_uncert_bands"))) {
+    } else if (all(.cube_bands(cube) %in% .conf("sits_uncert_bands"))) {
         class(cube) <- c("uncert_cube", "derived_cube", class(cube))
     } else {
         class(cube) <- c("eo_cube", class(cube))
@@ -219,6 +219,10 @@ NULL
     UseMethod(".cube_labels", cube)
 }
 #' @export
+.cube_labels.derived_cube <- function(cube, dissolve = FALSE) {
+    return(cube[["labels"]][[1]])
+}
+#' @export
 .cube_labels.raster_cube <- function(cube, dissolve = TRUE) {
     labels <- .compact(slider::slide(cube, .tile_labels))
     if (dissolve) {
@@ -1268,3 +1272,16 @@ NULL
     })
     return(unlist(cube_chunks, recursive = FALSE))
 }
+#' @title Return base info
+#' @name .cube_has_base_info
+#' @keywords internal
+#' @noRd
+#' @author Gilberto Camara, \email{gilberto.camara@@inpe.br}
+#'
+#' @param cube Raster cube
+#' @return TRUE/FALSE
+#'
+#'
+.cube_has_base_info <- function(cube) {
+    return(.has(cube[["base_info"]]))
+}
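`.cube_find_class()` infers the cube type from its band names. A standalone sketch of that decision logic; the band sets below are assumptions standing in for the real `.conf("sits_probs_bands")` and `.conf("sits_uncert_bands")` configuration values:

```r
probs_bands  <- c("probs", "bayes")             # assumed configuration values
uncert_bands <- c("entropy", "least", "margin") # assumed configuration values

guess_cube_class <- function(bands) {
    if (all(bands %in% probs_bands)) {
        "probs_cube"
    } else if (all(bands == "class")) {
        "class_cube"
    } else if (all(bands == "variance")) {
        "variance_cube"
    } else if (all(bands %in% uncert_bands)) {
        "uncert_cube"
    } else {
        "eo_cube"
    }
}

guess_cube_class("probs")                # "probs_cube"
guess_cube_class("class")                # "class_cube"
guess_cube_class(c("B02", "B04", "B08")) # "eo_cube"
```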

R/api_data.R  (+87 -13)

@@ -54,8 +54,9 @@
     rast <- .raster_open_rast(.tile_path(cube))
     block <- .raster_file_blocksize(rast)
     # 1st case - split samples by tiles
-    if (.raster_nrows(rast) == block[["nrows"]] &&
-        .raster_ncols(rast) == block[["ncols"]]) {
+    if ((.raster_nrows(rast) == block[["nrows"]] &&
+        .raster_ncols(rast) == block[["ncols"]]) ||
+        inherits(cube, "dem_cube")) {
         # split samples by bands and tile
         ts_tbl <- .data_by_tile(
             cube = cube,
@@ -78,6 +79,18 @@
             progress = progress
         )
     }
+    if (.has(cube[["base_info"]])) {
+        cube_base <- cube[["base_info"]][[1]]
+        bands_base <- .cube_bands(cube_base)
+        base_tbl <- .data_get_ts(
+            cube = cube_base,
+            samples = samples,
+            bands = bands_base,
+            impute_fn = impute_fn,
+            multicores = multicores,
+            progress = progress
+        )
+    }
     return(ts_tbl)
 }
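The new branch pulls a second set of time series from the cube's `base_info` cube (for example, a DEM attached to the main cube) for the same samples; how `base_tbl` is merged back is not shown in this hunk. As a purely hypothetical illustration, joining a static per-sample attribute onto a samples tibble could look like this:

```r
library(dplyr)

# toy samples and a toy "base" attribute (e.g. elevation) for the same points
samples <- tibble(
    longitude = c(-55.2, -54.9),
    latitude  = c(-11.5, -11.8),
    label     = c("Forest", "Pasture")
)
base_tbl <- tibble(
    longitude = c(-55.2, -54.9),
    latitude  = c(-11.5, -11.8),
    ELEVATION = c(312, 287)
)

left_join(samples, base_tbl, by = c("longitude", "latitude"))
```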

@@ -102,7 +115,7 @@
     }
     .check_cube_bands(cube, bands = bands)
     # get cubes timeline
-    tl <- sits_timeline(cube)
+    tl <- .cube_timeline(cube)[[1]]
     # create tile-band pairs for parallelization
     tiles_bands <- tidyr::expand_grid(
         tile = .cube_tiles(cube),
@@ -126,7 +139,7 @@
         # select tile and band
         tile_id <- tile_band[[1]]
         band <- tile_band[[2]]
-        tile <- sits_select(cube, bands = band, tiles = tile_id)
+        tile <- .select_raster_cube(cube, bands = band, tiles = tile_id)
         # create a hash to store temporary samples file
         hash_bundle <- digest::digest(list(tile, samples), algo = "md5")
         filename <- .file_path(
@@ -252,7 +265,7 @@
     hash_bundle <- purrr::map_chr(tiles_bands, function(tile_band) {
         tile_id <- tile_band[[1]]
         band <- tile_band[[2]]
-        tile <- sits_select(cube, bands = band, tiles = tile_id)
+        tile <- .select_raster_cube(cube, bands = band, tiles = tile_id)
         digest::digest(list(tile, samples), algo = "md5")
     })
     # recreate file names to delete them
@@ -308,7 +321,7 @@
 #'
 #' @return A sits tibble with the average of all points by each polygon.
 .data_avg_polygon <- function(data) {
-    bands <- sits_bands(data)
+    bands <- .samples_bands(data)
     columns_to_avg <- c(bands, "latitude", "longitude")
     data_avg <- data |>
         tidyr::unnest(cols = "time_series") |>
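The hunks above (and several below) recreate the same `hash_bundle`: an MD5 digest of the selected tile plus the samples, used to name temporary files so that repeated calls can find and reuse or delete them. A minimal sketch of that caching pattern with a made-up object and a temporary output directory:

```r
library(digest)

tile_and_samples <- list(tile = "20LKP_B08", samples = data.frame(id = 1:3))
hash_bundle <- digest(tile_and_samples, algo = "md5")

# temporary file named after the hash, so the same inputs map to the same file
output_dir <- tempdir()
filename <- file.path(output_dir, paste0("samples_", hash_bundle, ".rds"))
if (!file.exists(filename)) {
    saveRDS(tile_and_samples, filename)  # the expensive extraction would go here
}
readRDS(filename)
```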
@@ -351,7 +364,7 @@
                       progress) {
     .check_set_caller(".data_by_tile")
     # Get cube timeline
-    tl <- sits_timeline(cube)
+    tl <- .cube_timeline(cube)[[1]]
     # Get tile-band combination
     tiles_bands <- .cube_split_tiles_bands(cube = cube, bands = bands)
     # Set output_dir
@@ -371,7 +384,7 @@
         tile_id <- tile_band[[1]]
         band <- tile_band[[2]]
 
-        tile <- sits_select(
+        tile <- .select_raster_cube(
             data = cube,
             bands = c(band, cld_band),
             tiles = tile_id
@@ -508,7 +521,9 @@
     hash_bundle <- purrr::map_chr(tiles_bands, function(tile_band) {
         tile_id <- tile_band[[1]]
         band <- tile_band[[2]]
-        tile <- sits_select(cube, bands = c(band, cld_band), tiles = tile_id)
+        tile <- .select_raster_cube(cube, bands = c(band, cld_band),
+            tiles = tile_id
+        )
         digest::digest(list(tile, samples), algo = "md5")
     })
     # recreate file names to delete them
@@ -550,7 +565,7 @@
                        multicores,
                        progress) {
     # Get cube timeline
-    tl <- sits_timeline(cube)
+    tl <- .cube_timeline(cube)[[1]]
     # transform sits tibble to sf
     samples_sf <- sits_as_sf(samples)
     # Get chunks samples
@@ -571,7 +586,7 @@
     on.exit(.parallel_stop(), add = TRUE)
     # Get the samples in parallel using tile-band combination
     samples_tiles_bands <- .parallel_map(chunks_samples, function(chunk) {
-        tile <- sits_select(
+        tile <- .select_raster_cube(
             data = cube,
             bands = c(bands, cld_band),
             tiles = chunk[["tile"]]
@@ -666,7 +681,7 @@
     # bind rows to get a melted tibble of samples
     ts_tbl <- dplyr::bind_rows(samples_tiles_bands)
     if (!.has_ts(ts_tbl)) {
-        warning(.conf("messages", ".get_data_by_chunks"),
+        warning(.conf("messages", ".data_by_chunks"),
             immediate. = TRUE, call. = FALSE
         )
         return(.tibble())
@@ -705,7 +720,7 @@
         dplyr::ungroup()
     # recreate hash values
     hash_bundle <- purrr::map_chr(chunks_samples, function(chunk) {
-        tile <- sits_select(
+        tile <- .select_raster_cube(
             data = cube,
             bands = c(bands, cld_band),
             tiles = chunk[["tile"]]
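The hunks above replace the exported `sits_select()` with the internal `.select_raster_cube()` when subsetting the cube to one tile and a few bands inside parallel workers. A rough plain-tibble analogue of that subsetting, with a made-up cube layout (the real cube nests band files inside a `file_info` column, so this is only an illustration):

```r
library(dplyr)

# toy "cube": one row per tile/band combination
cube <- tibble(
    tile = c("20LKP", "20LKP", "20LLP", "20LLP"),
    band = c("B04", "B08", "B04", "B08"),
    path = paste0("/data/", c("20LKP_B04", "20LKP_B08",
                              "20LLP_B04", "20LLP_B08"), ".tif")
)

select_tile_band <- function(cube, bands, tiles) {
    filter(cube, band %in% bands, tile %in% tiles)
}
select_tile_band(cube, bands = "B08", tiles = "20LKP")
```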
@@ -733,3 +748,62 @@
     }
     return(ts_tbl)
 }
+#' @title get time series from base tiles
+#' @name .data_base_tiles
+#' @keywords internal
+#' @noRd
+#' @param cube Data cube from where data is to be retrieved.
+#' @param samples Samples to be retrieved.
+#' @param ts_time Time series from multitemporal bands
+#'
+#' @return Time series information with base tile data
+#'
+.data_base_tiles <- function(cube, samples) {
+    # retrieve values from samples
+    #
+    # read each tile
+    samples <- slider::slide_dfr(cube, function(tile){
+        # get XY
+        xy_tb <- .proj_from_latlong(
+            longitude = samples[["longitude"]],
+            latitude = samples[["latitude"]],
+            crs = .cube_crs(tile)
+        )
+        # join lat-long with XY values in a single tibble
+        samples <- dplyr::bind_cols(samples, xy_tb)
+        # filter the points inside the data cube space-time extent
+        samples <- dplyr::filter(
+            samples,
+            .data[["X"]] > tile[["xmin"]],
+            .data[["X"]] < tile[["xmax"]],
+            .data[["Y"]] > tile[["ymin"]],
+            .data[["Y"]] < tile[["ymax"]]
+        )
+
+        # are there points to be retrieved from the cube?
+        if (nrow(samples) == 0) {
+            return(NULL)
+        }
+        # create a matrix to extract the values
+        xy <- matrix(
+            c(samples[["X"]], samples[["Y"]]),
+            nrow = nrow(samples),
+            ncol = 2
+        )
+        colnames(xy) <- c("X", "Y")
+
+        # get the values of the time series as matrix
+        base_bands <- .tile_base_bands(tile)
+        samples <- purrr::map_dbl(base_bands, function(band){
+            values_base_band <- .tile_base_extract(
+                tile = tile,
+                band = band,
+                xy = xy
+            )
+            samples[[band]] <- values_base_band
+            return(samples)
+        })
+        return(samples)
+    })
+}
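The new `.data_base_tiles()` projects the sample coordinates into the tile CRS, keeps the points inside the tile extent, and extracts one value per base band at each XY location via the internal `.tile_base_extract()`. A standalone sketch of that extraction step using terra directly, on a synthetic raster (everything below is illustrative, not the sits internals):

```r
library(terra)

# synthetic single-band raster standing in for a base band (e.g. elevation)
r <- rast(nrows = 10, ncols = 10, xmin = 0, xmax = 10, ymin = 0, ymax = 10)
values(r) <- runif(ncell(r), min = 200, max = 400)

# points already projected to the raster CRS, as an XY matrix
xy <- matrix(c(2.5, 7.5,
               3.5, 1.5), ncol = 2, byrow = TRUE)
colnames(xy) <- c("X", "Y")

# one extracted value per point, to be attached as a new sample column
elevation <- terra::extract(r, xy)
elevation
```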
