From 08f1cbda06754ca293bef2d9b7b4c872892a24bf Mon Sep 17 00:00:00 2001
From: Brian Pondi
Date: Thu, 23 Nov 2023 16:02:22 +0100
Subject: [PATCH 1/2] aggregate temporal period

---
 R/api.R                                    |   1 +
 R/processes.R                              | 107 +++++++++++++-----
 README.md                                  |  19 ++--
 .../R-Client-truecolors-newyork.R          |   8 +-
 examples/02-ndvi/R-Client-ndvi-amazonia.R  |  21 ++--
 .../R-Client-bfast-brandenburg.R           |  18 +--
 6 files changed, 114 insertions(+), 60 deletions(-)

diff --git a/R/api.R b/R/api.R
index a356117..2146e6f 100644
--- a/R/api.R
+++ b/R/api.R
@@ -293,6 +293,7 @@ addEndpoint = function() {
   Session$assignProcess(load_collection)
   Session$assignProcess(load_stac)
   Session$assignProcess(save_result)
+  Session$assignProcess(aggregate_temporal_period)
   Session$assignProcess(filter_bands)
   Session$assignProcess(filter_bbox)
   Session$assignProcess(filter_spatial)
diff --git a/R/processes.R b/R/processes.R
index a871c47..b1da719 100644
--- a/R/processes.R
+++ b/R/processes.R
@@ -100,6 +100,15 @@ load_collection <- Process$new(
         )
       )
     ),
+    Parameter$new(
+      name = "crs",
+      description = "Coordinate Reference System, default = 4326",
+      schema = list(
+        type = "number",
+        subtype = "epsg-code"
+      ),
+      optional = TRUE
+    ),
     Parameter$new(
       name = "temporal_extent",
       description = "Limits the data to load from the collection to the specified left-closed temporal interval.",
@@ -115,38 +124,10 @@ load_collection <- Process$new(
         type = "array"
       ),
       optional = TRUE
-    ),
-    ### Additional variables for flexibility due to gdalcubes
-    Parameter$new(
-      name = "pixels_size",
-      description = "size of pixels in x-direction(longitude / easting) and y-direction (latitude / northing). Default is 300",
-      schema = list(
-        type = "number"
-      ),
-      optional = TRUE
-    ),
-    Parameter$new(
-      name = "time_aggregation",
-      description = "size of pixels in time-direction, expressed as ISO8601 period string (only 1 number and unit is allowed) such as \"P16D\".Default is monthly i.e. \"P1M\".",
-      schema = list(
-        type = "string",
-        subtype = "duration"
-      ),
-      optional = TRUE
-    ),
-    Parameter$new(
-      name = "crs",
-      description = "Coordinate Reference System, default = 4326",
-      schema = list(
-        type = "number",
-        subtype = "epsg-code"
-      ),
-      optional = TRUE
     )
   ),
   returns = eo_datacube,
-  operation = function(id, spatial_extent, temporal_extent, bands = NULL, pixels_size = 300, time_aggregation = "P1M",
-                       crs = 4326, job) {
+  operation = function(id, spatial_extent, crs = 4326, temporal_extent, bands = NULL, job) {
     # Temporal extent preprocess
     t0 <- temporal_extent[[1]]
     t1 <- temporal_extent[[2]]
@@ -205,7 +186,7 @@ load_collection <- Process$new(
     crs <- c("EPSG", crs)
     crs <- paste(crs, collapse = ":")
     v.overview <- gdalcubes::cube_view(
-      srs = crs, dx = pixels_size, dy = pixels_size, dt = time_aggregation,
+      srs = crs, dx = 30, dy = 30, dt = "P15D",
       aggregation = "median", resampling = "average",
       extent = list(
         t0 = t0, t1 = t1,
@@ -362,6 +343,72 @@ load_stac <- Process$new(
   }
 )
 
+#' aggregate temporal period
+aggregate_temporal_period <- Process$new(
+  id = "aggregate_temporal_period",
+  description = "Computes a temporal aggregation based on calendar hierarchies such as years, months or seasons.",
+  categories = as.array("aggregate", "cubes", "climatology"),
+  summary = "Temporal aggregations based on calendar hierarchies",
+  parameters = list(
+    Parameter$new(
+      name = "data",
+      description = "The source data cube.",
+      schema = list(
+        type = "object",
+        subtype = "raster-cube"
+      )
+    ),
+    Parameter$new(
+      name = "period",
+      description = "The time intervals to aggregate; one of \"week\", \"month\", \"year\" or \"decade\".",
+      schema = list(
+        type = "string"
+      ),
+      optional = FALSE
+    ),
+    Parameter$new(
+      name = "reducer",
+      description = "A reducer to be applied for the values contained in each interval. A reducer is a single process such as mean, or a set of processes, which computes a single value for a list of values.",
+      schema = list(
+        type = "any"
+      ),
+      optional = FALSE
+    ),
+    Parameter$new(
+      name = "dimension",
+      description = "The name of the temporal dimension for aggregation.",
+      schema = list(
+        type = "any"
+      ),
+      optional = TRUE
+    ),
+    Parameter$new(
+      name = "context",
+      description = "Additional data to be passed to the reducer.",
+      schema = list(
+        type = "any"
+      ),
+      optional = TRUE
+    )
+  ),
+  returns = eo_datacube,
+  operation = function(data, period, reducer, dimension = NULL, context = NULL, job) {
+    dt_period <- switch(period,
+      week = "P7D",
+      month = "P1M",
+      year = "P1Y",
+      decade = "P10Y",
+      stop("The specified period is not supported")
+    )
+
+    message("Aggregate temporal period ...")
+    message("Aggregate temporal period: ", dt_period, " using reducer: ", reducer)
+
+    cube <- gdalcubes::aggregate_time(cube = data, dt = dt_period, method = reducer)
+    message(gdalcubes::as_json(cube))
+    return(cube)
+  }
+)
 
 #' filter bands
 filter_bands <- Process$new(
diff --git a/README.md b/README.md
index 5f2246a..64e7d3d 100644
--- a/README.md
+++ b/README.md
@@ -126,18 +126,17 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
                                                    south=-1027138,
                                                    east=-7329987,
                                                    north=-1018790),
-                             temporal_extent = c("2021-05-01", "2022-06-30"),
-                             # extra args for data cubes regularization -> courtesy of gdalcubes
-                             pixels_size = 30,
-                             time_aggregation = "P1Y",
-                             crs = 3857)
+                             crs = 3857,
+                             temporal_extent = c("2021-05-01", "2022-06-30"))
 
 # filter the data cube for the desired bands
 datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B04", "B08"))
 
+# aggregate data cube to a year
+datacube_agg = p$aggregate_temporal_period(data = datacube_filtered, period = "year", reducer = "median")
 
 # ndvi calculation
-datacube_ndvi = p$ndvi(data = datacube_filtered, red = "B04", nir = "B08")
+datacube_ndvi = p$ndvi(data = datacube_agg, red = "B04", nir = "B08")
 
 # supported formats
 formats = list_file_formats()
@@ -203,7 +202,11 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
                              crs = 32633)
 
 # filter the data cube for the desired bands
-datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B04", "B08"))
+datacube_filtered = p$filter_bands(data = datacube_init,
+                                   bands = c("B04", "B08"))
+# aggregate data cube to monthly
+datacube_agg = p$aggregate_temporal_period(data = datacube_filtered,
+                                   period = "month", reducer = "median")
 
 # user defined R function - bfast change detection method
 change_detection = "function(x) {
@@ -223,7 +226,7 @@ change_detection = "function(x) {
   }"
 
 # run udf
-datacube_udf = p$run_udf(data = datacube_filtered, udf = change.detection, context = c("change_date", "change_magnitude"))
+datacube_udf = p$run_udf(data = datacube_agg, udf = change_detection, context = c("change_date", "change_magnitude"))
 
 # supported formats
 formats = list_file_formats()
diff --git a/examples/01-rgb-download/R-Client-truecolors-newyork.R b/examples/01-rgb-download/R-Client-truecolors-newyork.R
index 0448a37..4733d95 100644
--- a/examples/01-rgb-download/R-Client-truecolors-newyork.R
+++ b/examples/01-rgb-download/R-Client-truecolors-newyork.R
@@ -26,11 +26,9 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
                                                         south=4483092.4,
                                                         east=609472,
                                                         north=4530135),
-                                  temporal_extent = c("2021-06-01", "2021-06-30"),
-                                  # extra optional args for datacubes regularization -> courtesy of gdalcubes
-                                  pixels_size = 10,
-                                  time_aggregation = "P1M",
-                                  crs = 32618)
+                                  crs = 32618,
+                                  temporal_extent = c("2021-06-01", "2021-06-30")
+                                  )
 
 # filter the data cube for the desired bands
 datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B02","B03","B04"))
diff --git a/examples/02-ndvi/R-Client-ndvi-amazonia.R b/examples/02-ndvi/R-Client-ndvi-amazonia.R
index f740c0e..072563c 100644
--- a/examples/02-ndvi/R-Client-ndvi-amazonia.R
+++ b/examples/02-ndvi/R-Client-ndvi-amazonia.R
@@ -26,22 +26,23 @@ p = processes()
 
 # load the initial data collection and limit the amount of data loaded
 datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
-                                  spatial_extent = list(west=-7338335,
-                                                        south=-1027138,
-                                                        east=-7329987,
-                                                        north=-1018790),
-                                  temporal_extent = c("2021-05-01", "2022-06-30"),
-                                  # extra optional args for datacubes regularization -> courtesy of gdalcubes
-                                  pixels_size = 30,
-                                  time_aggregation = "P1Y",
-                                  crs = 3857)
+                            spatial_extent = list(west=-7338335,
+                                                  south=-1027138,
+                                                  east=-7329987,
+                                                  north=-1018790),
+                            crs = 3857,
+                            temporal_extent = c("2021-05-01", "2022-06-30"))
+
+
 
 # filter the data cube for the desired bands
 datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B04", "B08"))
 
+# aggregate data cube to a year
+datacube_agg = p$aggregate_temporal_period(data = datacube_filtered, period = "year", reducer = "median")
 
 # ndvi calculation
-datacube_ndvi = p$ndvi(data = datacube_filtered, red = "B04", nir = "B08")
+datacube_ndvi = p$ndvi(data = datacube_agg, red = "B04", nir = "B08")
 
 # supported formats
 formats = list_file_formats()
diff --git a/examples/03-change-detection/R-Client-bfast-brandenburg.R b/examples/03-change-detection/R-Client-bfast-brandenburg.R
index 8bc32de..c183edb 100644
--- a/examples/03-change-detection/R-Client-bfast-brandenburg.R
+++ b/examples/03-change-detection/R-Client-bfast-brandenburg.R
@@ -31,17 +31,21 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
                                                         east=422094.8,
                                                         north=5807036.1),
                                   temporal_extent = c("2016-01-01", "2020-12-31"),
-                                  # extra optional args for datacubes regularization -> courtesy of gdalcubes
+                                  # extra args for data cubes regularization
                                   pixels_size = 10,
                                   time_aggregation = "P1M",
                                   crs = 32633)
 
 # filter the data cube for the desired bands
-datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B04", "B08"))
+datacube_filtered = p$filter_bands(data = datacube_init,
+                                   bands = c("B04", "B08"))
+# aggregate data cube to monthly
+datacube_agg = p$aggregate_temporal_period(data = datacube_filtered,
+                                   period = "month", reducer = "median")
 
-# bfast custom change detection method
-change.detection = 'function(x) {
-  knr <- exp(-((x["B08",]/10000)-(x["B04",]/10000))^2/(2))
+# user defined R function - bfast change detection method
+change_detection = "function(x) {
+  knr <- exp(-((x[\"B08\",]/10000)-(x[\"B04\",]/10000))^2/(2))
   kndvi <- (1-knr) / (1+knr)
   if (all(is.na(kndvi))) {
     return(c(NA,NA))
   }
@@ -54,10 +58,10 @@ change.detection = 'function(x) {
   }, error = function(x) {
     return(c(NA,NA))
   })
-  }'
+  }"
 
 # run udf
-datacube_udf = p$run_udf(data = datacube_filtered, udf = change.detection, context = c("change_date", "change_magnitude"))
+datacube_udf = p$run_udf(data = datacube_agg, udf = change_detection, context = c("change_date", "change_magnitude"))
 
 # supported formats
 formats = list_file_formats()

From 02c678864a276530fd64a33a9aba9dacb3e1c8e3 Mon Sep 17 00:00:00 2001
From: Brian Pondi
Date: Thu, 23 Nov 2023 16:05:03 +0100
Subject: [PATCH 2/2] aggregate temporal period

---
 README.md | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index 64e7d3d..4a12856 100644
--- a/README.md
+++ b/README.md
@@ -195,11 +195,8 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
                                                    south=5803577.5,
                                                    east=422094.8,
                                                    north=5807036.1),
-                             temporal_extent = c("2016-01-01", "2020-12-31"),
-                             # extra args for data cubes regularization
-                             pixels_size = 10,
-                             time_aggregation = "P1M",
-                             crs = 32633)
+                             crs = 32633,
+                             temporal_extent = c("2016-01-01", "2020-12-31"))
 
 # filter the data cube for the desired bands
 datacube_filtered = p$filter_bands(data = datacube_init,
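
Usage note: the new aggregate_temporal_period process maps the calendar period names week, month, year and decade onto ISO 8601 durations and then delegates the aggregation to gdalcubes::aggregate_time. The snippet below is a minimal standalone sketch of that mapping for trying the behaviour directly in R with gdalcubes, outside the openEO service; the helper name period_to_duration and the object cube are illustrative assumptions and are not part of these patches.

    library(gdalcubes)

    # Map a calendar period name onto the ISO 8601 duration string expected by
    # gdalcubes (the same mapping as the switch() inside the new process).
    period_to_duration <- function(period) {
      switch(period,
        week   = "P7D",
        month  = "P1M",
        year   = "P1Y",
        decade = "P10Y",
        stop("The specified period is not supported")
      )
    }

    # Assuming `cube` is an existing gdalcubes data cube, aggregate it to yearly
    # medians, mirroring aggregate_temporal_period(period = "year", reducer = "median").
    yearly_median <- aggregate_time(cube, dt = period_to_duration("year"), method = "median")

The same call with dt = "P1M" reproduces the monthly aggregation used in the bfast change-detection example above.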