diff --git a/openapi.json b/openapi.json
index c7e7a3838..bb66c7fb8 100644
--- a/openapi.json
+++ b/openapi.json
@@ -806,11 +806,72 @@
       }
     },
     "/dataset/{dataset}/tiles": {
+      "get": {
+        "tags": [
+          "Datasets"
+        ],
+        "summary": "Retrieves the tiles of a gdal dataset.",
+        "operationId": "get_dataset_tiles_handler",
+        "parameters": [
+          {
+            "name": "dataset",
+            "in": "path",
+            "description": "Dataset Name",
+            "required": true,
+            "schema": {
+              "$ref": "#/components/schemas/DatasetName"
+            }
+          },
+          {
+            "name": "offset",
+            "in": "query",
+            "required": true,
+            "schema": {
+              "type": "integer",
+              "format": "int32",
+              "minimum": 0
+            }
+          },
+          {
+            "name": "limit",
+            "in": "query",
+            "required": true,
+            "schema": {
+              "type": "integer",
+              "format": "int32",
+              "minimum": 0
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "array",
+                  "items": {
+                    "$ref": "#/components/schemas/DatasetTile"
+                  }
+                }
+              }
+            }
+          },
+          "401": {
+            "$ref": "#/components/responses/UnauthorizedUserResponse"
+          }
+        },
+        "security": [
+          {
+            "session_token": []
+          }
+        ]
+      },
       "post": {
         "tags": [
           "Datasets"
         ],
-        "summary": "Add a tile to a gdal dataset.",
+        "summary": "Add tiles to a gdal dataset.",
         "operationId": "add_dataset_tiles_handler",
         "parameters": [
           {
@@ -827,7 +888,10 @@
         "content": {
           "application/json": {
             "schema": {
-              "$ref": "#/components/schemas/AutoCreateDataset"
+              "type": "array",
+              "items": {
+                "$ref": "#/components/schemas/AddDatasetTile"
+              }
             }
           }
         },
@@ -835,7 +899,110 @@
       },
       "responses": {
         "200": {
-          "description": ""
+          "description": "OK",
+          "content": {
+            "application/json": {
+              "schema": {
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/DatasetTileId"
+                }
+              }
+            }
+          }
+        }
+      },
+      "security": [
+        {
+          "session_token": []
+        }
+      ]
+      },
+      "delete": {
+        "tags": [
+          "Datasets"
+        ],
+        "summary": "Deletes tiles from a gdal dataset.",
+        "operationId": "delete_dataset_tiles_handler",
+        "parameters": [
+          {
+            "name": "dataset",
+            "in": "path",
+            "description": "Dataset Name",
+            "required": true,
+            "schema": {
+              "$ref": "#/components/schemas/DatasetName"
+            }
+          }
+        ],
+        "requestBody": {
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/DeleteDatasetTiles"
+              }
+            }
+          },
+          "required": true
+        },
+        "responses": {
+          "200": {
+            "description": "OK"
+          },
+          "401": {
+            "$ref": "#/components/responses/UnauthorizedUserResponse"
+          }
+        },
+        "security": [
+          {
+            "session_token": []
+          }
+        ]
+      }
+    },
+    "/dataset/{dataset}/tiles/{tile}": {
+      "put": {
+        "tags": [
+          "Datasets"
+        ],
+        "summary": "Updates a tile of a gdal dataset.",
+        "operationId": "update_dataset_tile_handler",
+        "parameters": [
+          {
+            "name": "dataset",
+            "in": "path",
+            "description": "Dataset Name",
+            "required": true,
+            "schema": {
+              "$ref": "#/components/schemas/DatasetName"
+            }
+          },
+          {
+            "name": "tile",
+            "in": "path",
+            "description": "Tile Id",
+            "required": true,
+            "schema": {
+              "$ref": "#/components/schemas/DatasetTileId"
+            }
+          }
+        ],
+        "requestBody": {
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/UpdateDatasetTile"
+              }
+            }
+          },
+          "required": true
+        },
+        "responses": {
+          "200": {
+            "description": "OK"
+          },
+          "401": {
+            "$ref": "#/components/responses/UnauthorizedUserResponse"
          }
        },
        "security": [
          {
            "session_token": []
          }
        ]
      }
    },
@@ -6240,12 +6407,65 @@
       }
     }
   },
+      "DatasetTile": {
+        "type": "object",
+        "required": [
+          "id",
+          "time",
+          "spatial_partition",
+          "band",
+          "z_index",
+          "params"
+        ],
+        "properties": {
+          "band": {
+            "type": "integer",
+            "format": "int32",
+            "minimum": 0
+          },
+          "id": {
+            "$ref": "#/components/schemas/DatasetTileId"
+          },
+          "params": {
+            "$ref": "#/components/schemas/GdalDatasetParameters"
+          },
+          "spatial_partition": {
+            "$ref": "#/components/schemas/SpatialPartition2D"
+          },
+          "time": {
+            "$ref": "#/components/schemas/TimeInterval"
+          },
+          "z_index": {
+            "type": "integer",
+            "format": "int32",
+            "minimum": 0
+          }
+        }
+      },
+      "DatasetTileId": {
+        "type": "string",
+        "format": "uuid"
+      },
       "DateTimeParseFormat": {
         "type": "string"
       },
       "DateTimeString": {
         "type": "string"
       },
+      "DeleteDatasetTiles": {
+        "type": "object",
+        "required": [
+          "tileIds"
+        ],
+        "properties": {
+          "tileIds": {
+            "type": "array",
+            "items": {
+              "$ref": "#/components/schemas/DatasetTileId"
+            }
+          }
+        }
+      },
       "DerivedColor": {
         "type": "object",
         "required": [
@@ -10287,6 +10507,37 @@
       }
     }
   },
+      "UpdateDatasetTile": {
+        "type": "object",
+        "required": [
+          "time",
+          "spatial_partition",
+          "band",
+          "z_index",
+          "params"
+        ],
+        "properties": {
+          "band": {
+            "type": "integer",
+            "format": "int32",
+            "minimum": 0
+          },
+          "params": {
+            "$ref": "#/components/schemas/GdalDatasetParameters"
+          },
+          "spatial_partition": {
+            "$ref": "#/components/schemas/SpatialPartition2D"
+          },
+          "time": {
+            "$ref": "#/components/schemas/TimeInterval"
+          },
+          "z_index": {
+            "type": "integer",
+            "format": "int32",
+            "minimum": 0
+          }
+        }
+      },
       "UpdateLayer": {
         "type": "object",
         "required": [
diff --git a/services/src/api/apidoc.rs b/services/src/api/apidoc.rs
index 0f481d61f..10034449b 100644
--- a/services/src/api/apidoc.rs
+++ b/services/src/api/apidoc.rs
@@ -1,7 +1,9 @@
 #![allow(clippy::needless_for_each)] // TODO: remove when clippy is fixed for utoipa
 
 use crate::api::handlers;
-use crate::api::handlers::datasets::{AddDatasetTile, VolumeFileLayersResponse};
+use crate::api::handlers::datasets::{
+    AddDatasetTile, DatasetTile, DeleteDatasetTiles, UpdateDatasetTile, VolumeFileLayersResponse,
+};
 use crate::api::handlers::permissions::{
     PermissionListOptions, PermissionListing, PermissionRequest, Resource,
 };
@@ -68,6 +70,7 @@ use crate::api::{
 };
 use crate::contexts::SessionId;
 use crate::datasets::listing::{DatasetListing, OrderBy};
+use crate::datasets::postgres::DatasetTileId;
 use crate::datasets::storage::{AutoCreateDataset, SuggestMetaData};
 use crate::datasets::upload::{UploadId, VolumeName};
 use crate::datasets::{DatasetName, RasterDatasetFromWorkflow, RasterDatasetFromWorkflowResult};
@@ -118,6 +121,9 @@ use utoipa::{Modify, OpenApi};
         handlers::datasets::update_dataset_symbology_handler,
         handlers::datasets::update_loading_info_handler,
         handlers::datasets::add_dataset_tiles_handler,
+        handlers::datasets::get_dataset_tiles_handler,
+        handlers::datasets::update_dataset_tile_handler,
+        handlers::datasets::delete_dataset_tiles_handler,
         handlers::layers::add_collection,
         handlers::layers::add_existing_collection_to_collection,
         handlers::layers::add_existing_layer_to_collection,
@@ -406,6 +412,10 @@
             VolumeName,
             DataPath,
             AddDatasetTile,
+            DatasetTile,
+            DatasetTileId,
+            UpdateDatasetTile,
+            DeleteDatasetTiles,
 
             PlotOutputFormat,
             WrappedPlotOutput,
diff --git a/services/src/api/handlers/datasets.rs b/services/src/api/handlers/datasets.rs
index 2c007de46..e99053e71 100755
--- a/services/src/api/handlers/datasets.rs
+++ b/services/src/api/handlers/datasets.rs
@@ -15,6 +15,7 @@ use crate::{
     datasets::{
         DatasetName,
         listing::{DatasetListOptions, DatasetListing, DatasetProvider},
+        postgres::DatasetTileId,
         storage::{AutoCreateDataset, DatasetStore, SuggestMetaData},
         upload::{AdjustFilePath, Upload, UploadDb, UploadId, UploadRootPath, VolumeName, Volumes},
     },
@@ -28,7 +29,7 @@ use crate::{
 };
 use actix_web::{
     FromRequest, HttpResponse, HttpResponseBuilder, Responder,
-    web::{self, Json},
+    web::{self, Json, Query},
 };
 use gdal::{
     DatasetOptions,
@@ -63,7 +64,8 @@ use std::{
     convert::{TryFrom, TryInto},
     path::Path,
 };
-use utoipa::{ToResponse, ToSchema};
+use utoipa::{IntoParams, ToResponse, ToSchema};
+use validator::Validate;
 
 pub(crate) fn init_dataset_routes<C>(cfg: &mut web::ServiceConfig)
 where
@@ -94,9 +96,15 @@ where
             web::resource("/{dataset}/provenance")
                 .route(web::put().to(update_dataset_provenance_handler::<C>)),
         )
+        .service(
+            web::resource("/{dataset}/tiles/{tile}")
+                .route(web::put().to(update_dataset_tile_handler::<C>)),
+        )
         .service(
             web::resource("/{dataset}/tiles")
-                .route(web::post().to(add_dataset_tiles_handler::<C>)),
+                .route(web::post().to(add_dataset_tiles_handler::<C>))
+                .route(web::get().to(get_dataset_tiles_handler::<C>))
+                .route(web::delete().to(delete_dataset_tiles_handler::<C>)),
         )
         .service(
             web::resource("/{dataset}")
@@ -188,14 +196,14 @@ pub async fn list_datasets_handler<C: ApplicationContext>(
     Ok(web::Json(list))
 }
 
-/// Add a tile to a gdal dataset.
+/// Add tiles to a gdal dataset.
 #[utoipa::path(
     tag = "Datasets",
     post,
     path = "/dataset/{dataset}/tiles",
-    request_body = AutoCreateDataset,
+    request_body = [AddDatasetTile],
     responses(
-        (status = 200),
+        (status = 200, description = "OK", body = [DatasetTileId]),
     ),
     params(
         ("dataset" = DatasetName, description = "Dataset Name"),
@@ -246,7 +254,6 @@ pub async fn add_dataset_tiles_handler<C: ApplicationContext>(
         &dataset
             .data_path
             .ok_or(AddDatasetTilesError::DatasetIsMissingDataPath)?,
-        &session_context,
     )
     .context(CannotAddTilesToDataset)?;
 
@@ -254,11 +261,12 @@
     for tile in &tiles {
         validate_tile(tile, &data_path_file_path, &dataset_descriptor)?;
     }
 
-    db.add_dataset_tiles(dataset_id, tiles)
+    let tile_ids = db
+        .add_dataset_tiles(dataset_id, tiles)
         .await
         .context(CannotAddTilesToDataset)?;
 
-    Ok(HttpResponse::Ok().finish())
+    Ok(HttpResponse::Ok().json(tile_ids))
 }
 
 fn validate_tile(
@@ -345,24 +353,17 @@ fn validate_tile(
     Ok(())
 }
 
-fn file_path_from_data_path<T: SessionContext>(
-    data_path: &DataPath,
-    session_context: &T,
-) -> Result<PathBuf> {
+fn file_path_from_data_path(data_path: &DataPath) -> Result<PathBuf> {
     Ok(match data_path {
-        DataPath::Volume(volume_name) => session_context
-            .volumes()?
+        DataPath::Volume(volume_name) => Volumes::default()
+            .volumes
             .iter()
-            .find(|v| v.name == volume_name.0)
+            .find(|v| v.name == *volume_name)
             .ok_or(Error::UnknownVolumeName {
                 volume_name: volume_name.0.clone(),
             })?
             .path
-            .clone()
-            .ok_or(Error::CannotAccessVolumePath {
-                volume_name: volume_name.0.clone(),
-            })?
-            .into(),
+            .clone(),
         DataPath::Upload(upload_id) => upload_id.root_path()?,
     })
 }
@@ -445,6 +446,185 @@ pub async fn get_dataset_handler<C: ApplicationContext>(
     Ok(web::Json(dataset))
 }
 
+#[derive(Clone, Serialize, Deserialize, PartialEq, Debug, ToSchema)]
+pub struct DatasetTile {
+    pub id: DatasetTileId,
+    pub time: crate::api::model::datatypes::TimeInterval,
+    pub spatial_partition: SpatialPartition2D,
+    pub band: u32,
+    pub z_index: u32,
+    pub params: GdalDatasetParameters,
+}
+
+#[derive(Debug, Deserialize, IntoParams, Validate)]
+pub struct GetDatasetTilesParams {
+    pub offset: u32,
+    #[validate(range(min = 1, max = 100))]
+    pub limit: u32,
+    // TODO: filter by time, space, filename, ...
+}
+
+/// Retrieves the tiles of a gdal dataset.
+#[utoipa::path(
+    tag = "Datasets",
+    get,
+    path = "/dataset/{dataset}/tiles",
+    responses(
+        (status = 200, description = "OK", body = Vec<DatasetTile>),
+        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
+    ),
+    params(
+        ("dataset" = DatasetName, description = "Dataset Name"),
+        GetDatasetTilesParams
+    ),
+    security(
+        ("session_token" = [])
+    )
+)]
+pub async fn get_dataset_tiles_handler<C: ApplicationContext>(
+    dataset: web::Path<DatasetName>,
+    session: C::Session,
+    params: Query<GetDatasetTilesParams>,
+    app_ctx: web::Data<C>,
+) -> Result<web::Json<Vec<DatasetTile>>, GetDatasetTilesError> {
+    let session_ctx = app_ctx.session_context(session).db();
+
+    let real_dataset = dataset.into_inner();
+
+    let dataset_id = session_ctx
+        .resolve_dataset_name_to_id(&real_dataset)
+        .await
+        .context(CannotLoadDatasetForGettingTiles)?;
+
+    // handle the case where the dataset name is not known
+    let dataset_id = dataset_id
+        .ok_or(error::Error::UnknownDatasetName {
+            dataset_name: real_dataset.to_string(),
+        })
+        .context(CannotLoadDatasetForGettingTiles)?;
+
+    let tiles = session_ctx
+        .get_dataset_tiles(dataset_id, &params.into_inner())
+        .await
+        .context(CannotLoadDatasetTiles)?;
+
+    Ok(web::Json(tiles))
+}
+
+#[derive(Clone, Serialize, Deserialize, PartialEq, Debug, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct DeleteDatasetTiles {
+    pub tile_ids: Vec<DatasetTileId>,
+}
+
+/// Deletes tiles from a gdal dataset.
+#[utoipa::path(
+    tag = "Datasets",
+    delete,
+    path = "/dataset/{dataset}/tiles",
+    request_body = DeleteDatasetTiles,
+    responses(
+        (status = 200, description = "OK"),
+        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
+    ),
+    params(
+        ("dataset" = DatasetName, description = "Dataset Name"),
+    ),
+    security(
+        ("session_token" = [])
+    )
+)]
+pub async fn delete_dataset_tiles_handler<C: ApplicationContext>(
+    dataset: web::Path<DatasetName>,
+    session: C::Session,
+    delete: web::Json<DeleteDatasetTiles>,
+    app_ctx: web::Data<C>,
+) -> Result<HttpResponseBuilder, GetDatasetTilesError> {
+    let session_ctx = app_ctx.session_context(session).db();
+
+    let real_dataset = dataset.into_inner();
+
+    let dataset_id = session_ctx
+        .resolve_dataset_name_to_id(&real_dataset)
+        .await
+        .context(CannotLoadDatasetForGettingTiles)?;
+
+    // handle the case where the dataset name is not known
+    let dataset_id = dataset_id
+        .ok_or(error::Error::UnknownDatasetName {
+            dataset_name: real_dataset.to_string(),
+        })
+        .context(CannotLoadDatasetForGettingTiles)?;
+
+    session_ctx
+        .delete_dataset_tiles(dataset_id, delete.into_inner().tile_ids)
+        .await
+        .context(CannotLoadDatasetTiles)?;
+
+    Ok(HttpResponse::Ok())
+}
+
+#[derive(Clone, Serialize, Deserialize, PartialEq, Debug, ToSchema)]
+pub struct UpdateDatasetTile {
+    pub time: crate::api::model::datatypes::TimeInterval,
+    pub spatial_partition: SpatialPartition2D,
+    pub band: u32,
+    pub z_index: u32,
+    pub params: GdalDatasetParameters,
+}
+
+/// Updates a tile of a gdal dataset.
+#[utoipa::path(
+    tag = "Datasets",
+    put,
+    path = "/dataset/{dataset}/tiles/{tile}",
+    request_body = UpdateDatasetTile,
+    responses(
+        (status = 200, description = "OK"),
+        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
+    ),
+    params(
+        ("dataset" = DatasetName, description = "Dataset Name"),
+        ("tile" = DatasetTileId, description = "Tile Id"),
+    ),
+    security(
+        ("session_token" = [])
+    )
+)]
+pub async fn update_dataset_tile_handler<C: ApplicationContext>(
+    dataset: web::Path<(DatasetName, DatasetTileId)>,
+    session: C::Session,
+    tile: web::Json<UpdateDatasetTile>,
+    app_ctx: web::Data<C>,
+) -> Result<HttpResponseBuilder, UpdateDatasetTileError> {
+    let session_ctx = app_ctx.session_context(session).db();
+
+    let (real_dataset, tile_id) = dataset.into_inner();
+
+    let dataset_id = session_ctx
+        .resolve_dataset_name_to_id(&real_dataset)
+        .await
+        .context(CannotLoadDatasetForUpdatingTile)?;
+
+    // handle the case where the dataset name is not known
+    let dataset_id = dataset_id
+        .ok_or(error::Error::UnknownDatasetName {
+            dataset_name: real_dataset.to_string(),
+        })
+        .context(CannotLoadDatasetForUpdatingTile)?;
+
+    // TODO: validate the tile like in add tiles
+
+    session_ctx
+        .update_dataset_tile(dataset_id, tile_id, tile.into_inner())
+        .await
+        .context(CannotUpdateDatasetTile)?;
+
+    Ok(HttpResponse::Ok())
+}
+
 /// Update details about a dataset using the internal name.
 #[utoipa::path(
     tag = "Datasets",
@@ -490,6 +670,11 @@ pub async fn update_dataset_handler<C: ApplicationContext>(
         })
         .context(CannotLoadDatasetForUpdate)?;
 
+    // TODO: if the data_path is changed, validate that
+    // - it exists
+    // - it is accessible
+    // - all files referenced by the dataset still exist?
+
     session_ctx
         .update_dataset(dataset_id, update.into_inner())
         .await
@@ -1627,7 +1812,10 @@
         },
         raster::{GridShape2D, TilingSpecification},
         spatial_reference::SpatialReferenceOption,
-        util::{assert_image_equals, test::assert_eq_two_list_of_tiles},
+        util::{
+            assert_image_equals,
+            test::{TestDefault, assert_eq_two_list_of_tiles},
+        },
     };
     use geoengine_operators::{
         engine::{
@@ -3070,6 +3258,7 @@
             display_name: "new display name".to_string(),
             description: "new description".to_string(),
             tags: vec!["foo".to_string(), "bar".to_string()],
+            data_path: Some(DataPath::test_default()),
         };
 
         let req = actix_web::test::TestRequest::post()
@@ -3088,6 +3277,7 @@
         assert_eq!(dataset.display_name, update.display_name);
         assert_eq!(dataset.description, update.description);
         assert_eq!(dataset.tags, Some(update.tags));
+        assert_eq!(dataset.data_path, update.data_path);
 
         Ok(())
     }
@@ -3542,6 +3732,8 @@
         let res = send_test_request(req, app_ctx.clone()).await;
         assert_eq!(res.status(), 200, "response: {res:?}");
+        let tile_ids: Vec<DatasetTileId> = actix_web::test::read_body_json(res).await;
+        assert_eq!(tile_ids.len(), tiles.len());
 
         // create workflow
         let workflow = Workflow {
@@ -5403,4 +5595,163 @@
         Ok(())
     }
+
+    #[ge_context::test]
+    #[allow(clippy::too_many_lines)]
+    async fn it_gets_and_updates_and_deletes_tiles(app_ctx: PostgresContext) -> Result<()> {
+        let volume = VolumeName("test_data".to_string());
+
+        // add data
+        let create = CreateDataset {
+            data_path: DataPath::Volume(volume.clone()),
+            definition: DatasetDefinition {
+                properties: AddDataset {
+                    name: None,
+                    display_name: "ndvi (tiled)".to_string(),
+                    description: "ndvi".to_string(),
+                    source_operator: "MultiBandGdalSource".to_string(),
+                    symbology: None,
+                    provenance: None,
+                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
+                },
+                meta_data: MetaDataDefinition::GdalMultiBand(GdalMultiBand {
+                    r#type: Default::default(),
+                    result_descriptor: create_ndvi_result_descriptor(true).into(),
+                }),
+            },
+        };
+
+        let session = admin_login(&app_ctx).await;
+        let ctx = app_ctx.session_context(session.clone());
+
+        let db = ctx.db();
+
+        let req = actix_web::test::TestRequest::post()
+            .uri("/dataset")
+            .append_header((header::CONTENT_LENGTH, 0))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
+            .append_header((header::CONTENT_TYPE, "application/json"))
+            .set_payload(serde_json::to_string(&create)?);
+        let res = send_test_request(req, app_ctx.clone()).await;
+
+        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
+        let dataset_id = db
+            .resolve_dataset_name_to_id(&dataset_name)
+            .await
+            .unwrap()
+            .unwrap();
+
+        assert!(db.load_dataset(&dataset_id).await.is_ok());
+
+        // add tile
+        let tiles = create_ndvi_tiles()[0..1].to_vec();
+
+        let req = actix_web::test::TestRequest::post()
+            .uri(&format!("/dataset/{dataset_name}/tiles"))
+            .append_header((header::CONTENT_LENGTH, 0))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
+            .append_header((header::CONTENT_TYPE, "application/json"))
+            .set_payload(serde_json::to_string(&tiles)?);
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(
+            res.status(),
+            200,
+            "response: {read_body}",
+            read_body = actix_web::test::read_body_json::<serde_json::Value, _>(res).await
+        );
+
+        // get tile
+        let req = actix_web::test::TestRequest::get()
+            .uri(&format!("/dataset/{dataset_name}/tiles?offset=0&limit=10"))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(
+            res.status(),
+            200,
+            "response: {read_body}",
+            read_body = actix_web::test::read_body_json::<serde_json::Value, _>(res).await
+        );
+
+        let returned_tiles: Vec<DatasetTile> = actix_web::test::read_body_json(res).await;
+        assert_eq!(returned_tiles.len(), 1);
+        assert_eq!(
+            returned_tiles[0],
+            DatasetTile {
+                id: returned_tiles[0].id,
+                time: tiles[0].time,
+                spatial_partition: tiles[0].spatial_partition,
+                band: tiles[0].band,
+                z_index: tiles[0].z_index,
+                params: tiles[0].params.clone()
+            }
+        );
+
+        let update_tile = UpdateDatasetTile {
+            time: tiles[0].time,
+            spatial_partition: tiles[0].spatial_partition,
+            band: tiles[0].band,
+            z_index: tiles[0].z_index + 1,
+            params: tiles[0].params.clone(),
+        };
+
+        let req = actix_web::test::TestRequest::put()
+            .uri(&format!(
+                "/dataset/{dataset_name}/tiles/{}",
+                returned_tiles[0].id
+            ))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
+            .append_header((header::CONTENT_TYPE, "application/json"))
+            .set_payload(serde_json::to_string(&update_tile)?);
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(res.status(), 200, "response: {res:?}");
+
+        let req = actix_web::test::TestRequest::get()
+            .uri(&format!("/dataset/{dataset_name}/tiles?offset=0&limit=10"))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(res.status(), 200, "response: {res:?}");
+
+        let returned_tiles: Vec<DatasetTile> = actix_web::test::read_body_json(res).await;
+        assert_eq!(returned_tiles.len(), 1);
+        assert_eq!(
+            returned_tiles[0],
+            DatasetTile {
+                id: returned_tiles[0].id,
+                time: tiles[0].time,
+                spatial_partition: tiles[0].spatial_partition,
+                band: tiles[0].band,
+                z_index: tiles[0].z_index + 1,
+                params: tiles[0].params.clone()
+            }
+        );
+
+        let delete_tiles = DeleteDatasetTiles {
+            tile_ids: vec![returned_tiles[0].id],
+        };
+
+        let req = actix_web::test::TestRequest::delete()
+            .uri(&format!("/dataset/{dataset_name}/tiles"))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
+            .append_header((header::CONTENT_TYPE, "application/json"))
+            .set_payload(serde_json::to_string(&delete_tiles)?);
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(res.status(), 200, "response: {res:?}");
+
+        let req = actix_web::test::TestRequest::get()
+            .uri(&format!("/dataset/{dataset_name}/tiles?offset=0&limit=10"))
+            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
+
+        let res = send_test_request(req, app_ctx.clone()).await;
+        assert_eq!(res.status(), 200, "response: {res:?}");
+
+        let returned_tiles: Vec<DatasetTile> = actix_web::test::read_body_json(res).await;
+        assert_eq!(returned_tiles.len(), 0);
+
+        Ok(())
+    }
 }
diff --git a/services/src/api/model/responses/datasets/errors.rs b/services/src/api/model/responses/datasets/errors.rs
index 94ccb5ba4..202f2485a 100644
--- a/services/src/api/model/responses/datasets/errors.rs
+++ b/services/src/api/model/responses/datasets/errors.rs
@@ -176,3 +176,51 @@ impl fmt::Debug for AddDatasetTilesError {
         write!(f, "{}", ge_report(self))
     }
 }
+
+#[derive(Snafu, IntoStaticStr)]
+#[snafu(visibility(pub(crate)))]
+#[snafu(context(suffix(false)))] // disables default `Snafu` suffix
+pub enum GetDatasetTilesError {
+    CannotLoadDatasetForGettingTiles { source: error::Error },
+    CannotLoadDatasetTiles { source: error::Error },
+}
+
+impl ResponseError for GetDatasetTilesError {
+    fn error_response(&self) -> HttpResponse {
+        HttpResponse::build(self.status_code()).json(ErrorResponse::from(self))
+    }
+
+    fn status_code(&self) -> StatusCode {
+        StatusCode::BAD_REQUEST
+    }
+}
+
+impl fmt::Debug for GetDatasetTilesError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", ge_report(self))
+    }
+}
+
+#[derive(Snafu, IntoStaticStr)]
+#[snafu(visibility(pub(crate)))]
+#[snafu(context(suffix(false)))] // disables default `Snafu` suffix
+pub enum UpdateDatasetTileError {
+    CannotLoadDatasetForUpdatingTile { source: error::Error },
+    CannotUpdateDatasetTile { source: error::Error },
+}
+
+impl ResponseError for UpdateDatasetTileError {
+    fn error_response(&self) -> HttpResponse {
+        HttpResponse::build(self.status_code()).json(ErrorResponse::from(self))
+    }
+
+    fn status_code(&self) -> StatusCode {
+        StatusCode::BAD_REQUEST
+    }
+}
+
+impl fmt::Debug for UpdateDatasetTileError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", ge_report(self))
+    }
+}
diff --git a/services/src/api/model/services.rs b/services/src/api/model/services.rs
index a4a54de3a..473262726 100644
--- a/services/src/api/model/services.rs
+++ b/services/src/api/model/services.rs
@@ -156,7 +156,7 @@
 pub struct CreateDataset {
     pub definition: DatasetDefinition,
 }
 
-#[derive(Deserialize, Serialize, Debug, Clone, ToSchema)]
+#[derive(Deserialize, Serialize, Debug, Clone, ToSchema, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 pub enum DataPath {
     Volume(VolumeName),
@@ -170,6 +170,7 @@ impl TestDefault for DataPath {
 }
 
 #[derive(Deserialize, Serialize, Debug, Clone, ToSchema, Validate)]
+#[serde(rename_all = "camelCase")]
 pub struct UpdateDataset {
     pub name: DatasetName,
     #[validate(length(min = 1))]
     pub display_name: String,
     pub description: String,
     #[validate(custom(function = "validate_tags"))]
"validate_tags"))] pub tags: Vec, + pub data_path: Option, // TODO: make mandatory } #[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone, ToSchema, Validate)] diff --git a/services/src/datasets/postgres.rs b/services/src/datasets/postgres.rs index 9ba77a08f..96037734d 100644 --- a/services/src/datasets/postgres.rs +++ b/services/src/datasets/postgres.rs @@ -1,6 +1,8 @@ use std::path::PathBuf; -use crate::api::handlers::datasets::AddDatasetTile; +use crate::api::handlers::datasets::{ + AddDatasetTile, DatasetTile, GetDatasetTilesParams, UpdateDatasetTile, +}; use crate::api::model::datatypes::SpatialPartition2D; use crate::api::model::services::{DataPath, UpdateDataset}; use crate::contexts::PostgresDb; @@ -1169,13 +1171,14 @@ where .boxed_context(crate::error::PermissionDb)?; tx.execute( - "UPDATE datasets SET name = $2, display_name = $3, description = $4, tags = $5 WHERE id = $1;", + "UPDATE datasets SET name = $2, display_name = $3, description = $4, tags = $5, data_path = $6 WHERE id = $1;", &[ &dataset, &update.name, &update.display_name, &update.description, &update.tags, + &update.data_path, ], ) .await?; @@ -1301,7 +1304,7 @@ where &self, dataset: DatasetId, tiles: Vec, - ) -> Result<()> { + ) -> Result> { let mut conn = self.conn_pool.get().await?; let tx = conn.build_transaction().start().await?; @@ -1313,12 +1316,124 @@ where validate_z_index(&tx, dataset, &tiles).await?; - batch_insert_tiles(&tx, dataset, &tiles).await?; + let tile_ids = batch_insert_tiles(&tx, dataset, &tiles).await?; update_dataset_extents(&tx, dataset, &tiles).await?; tx.commit().await?; + Ok(tile_ids) + } + + async fn get_dataset_tiles( + &self, + dataset: DatasetId, + params: &GetDatasetTilesParams, + ) -> Result> { + let mut conn = self.conn_pool.get().await?; + let tx = conn.build_transaction().start().await?; + + self.ensure_permission_in_tx(dataset.into(), Permission::Read, &tx) + .await + .boxed_context(crate::error::PermissionDb)?; + + let rows = tx + .query( + " + SELECT + id, time, bbox, band, z_index, gdal_params + FROM + dataset_tiles + WHERE + dataset_id = $1 + ORDER BY + (time).start, + band, + (bbox).upper_left_coordinate.x, + (bbox).upper_left_coordinate.y, + z_index + OFFSET $2 + LIMIT $3", + &[ + &dataset, + &i64::from(params.offset), + &i64::from(params.limit), + ], + ) + .await?; + + let tiles: Vec = rows + .into_iter() + .map(|row| DatasetTile { + id: row.get(0), + time: row.get(1), + spatial_partition: row.get(2), + band: row.get(3), + z_index: row.get(4), + params: row.get::<_, GdalDatasetParameters>(5).into(), + }) + .collect(); + + Ok(tiles) + } + + async fn update_dataset_tile( + &self, + dataset: DatasetId, + tile_id: DatasetTileId, + tile: UpdateDatasetTile, + ) -> Result<()> { + let mut conn = self.conn_pool.get().await?; + let tx = conn.build_transaction().start().await?; + + self.ensure_permission_in_tx(dataset.into(), Permission::Read, &tx) + .await + .boxed_context(crate::error::PermissionDb)?; + + tx.query( + " + UPDATE dataset_tiles + SET dataset_id = $2, time = $3, bbox = $4, band = $5, z_index = $6, gdal_params = $7 + WHERE id = $1;", + &[ + &tile_id, + &dataset, + &tile.time, + &tile.spatial_partition, + &tile.band, + &tile.z_index, + &(GdalDatasetParameters::from(tile.params)), + ], + ) + .await?; + + tx.commit().await?; + + Ok(()) + } + + async fn delete_dataset_tiles( + &self, + dataset: DatasetId, + tile_ids: Vec, + ) -> Result<()> { + let mut conn = self.conn_pool.get().await?; + let tx = conn.build_transaction().start().await?; + + 
+        self.ensure_permission_in_tx(dataset.into(), Permission::Owner, &tx)
+            .await
+            .boxed_context(crate::error::PermissionDb)?;
+
+        tx.execute(
+            "
+            DELETE FROM dataset_tiles
+            WHERE dataset_id = $1 AND id = ANY($2);",
+            &[&dataset, &tile_ids],
+        )
+        .await?;
+
+        tx.commit().await?;
+
         Ok(())
     }
 }
@@ -1439,7 +1554,7 @@ async fn batch_insert_tiles(
     tx: &Transaction<'_>,
     dataset: DatasetId,
     tiles: &[AddDatasetTile],
-) -> Result<()> {
+) -> Result<Vec<DatasetTileId>> {
     // batch insert using array unnesting
     let tile_entries = tiles
         .iter()
@@ -1454,16 +1569,20 @@
         })
         .collect::<Vec<_>>();
 
-    tx.execute(
-        r#"
+    let rows = tx
+        .query(
+            r#"
         INSERT INTO dataset_tiles (id, dataset_id, time, bbox, band, z_index, gdal_params)
-        SELECT * FROM unnest($1::"TileEntry"[]);
+        SELECT * FROM unnest($1::"TileEntry"[])
+        RETURNING id;
         "#,
-        &[&tile_entries],
-    )
-    .await?;
+            &[&tile_entries],
+        )
+        .await?;
 
-    Ok(())
+    let tile_ids = rows.into_iter().map(|row| row.get(0)).collect();
+
+    Ok(tile_ids)
 }
 
 async fn update_dataset_extents(
diff --git a/services/src/datasets/storage.rs b/services/src/datasets/storage.rs
index d96404566..d18f0ca0a 100755
--- a/services/src/datasets/storage.rs
+++ b/services/src/datasets/storage.rs
@@ -1,9 +1,12 @@
 use super::listing::Provenance;
 use super::postgres::DatasetMetaData;
 use super::{DatasetIdAndName, DatasetName};
-use crate::api::handlers::datasets::AddDatasetTile;
+use crate::api::handlers::datasets::{
+    AddDatasetTile, DatasetTile, GetDatasetTilesParams, UpdateDatasetTile,
+};
 use crate::api::model::services::{DataPath, UpdateDataset};
 use crate::datasets::listing::{DatasetListing, DatasetProvider};
+use crate::datasets::postgres::DatasetTileId;
 use crate::datasets::upload::UploadDb;
 use crate::datasets::upload::UploadId;
 use crate::error::Result;
@@ -321,6 +324,28 @@ pub trait DatasetStore {
 
     async fn delete_dataset(&self, dataset: DatasetId) -> Result<()>;
 
-    async fn add_dataset_tiles(&self, dataset: DatasetId, tiles: Vec<AddDatasetTile>)
-        -> Result<()>;
+    async fn add_dataset_tiles(
+        &self,
+        dataset: DatasetId,
+        tiles: Vec<AddDatasetTile>,
+    ) -> Result<Vec<DatasetTileId>>;
+
+    async fn get_dataset_tiles(
+        &self,
+        dataset: DatasetId,
+        params: &GetDatasetTilesParams,
+    ) -> Result<Vec<DatasetTile>>;
+
+    async fn update_dataset_tile(
+        &self,
+        dataset: DatasetId,
+        tile_id: DatasetTileId,
+        tile: UpdateDatasetTile,
+    ) -> Result<()>;
+
+    async fn delete_dataset_tiles(
+        &self,
+        dataset: DatasetId,
+        tile_ids: Vec<DatasetTileId>,
+    ) -> Result<()>;
 }
diff --git a/test_data/api_calls/multi_tile.http b/test_data/api_calls/multi_tile.http
index fe1da5d59..ac67709b1 100644
--- a/test_data/api_calls/multi_tile.http
+++ b/test_data/api_calls/multi_tile.http
@@ -1,14 +1,19 @@
 # @name anonymousSession
-POST http://localhost:3030/api/anonymous
+POST http://localhost:3030/api/login
 Content-Type: application/json
 
+{
+    "email": "admin@localhost",
+    "password": "adminadmin"
+}
+
 ###
 
 # @name dataset
 POST http://localhost:3030/api/dataset
 Content-Type: application/json
 Authorization: Bearer {{anonymousSession.response.body.$.id}}
 
-< ../raster/multi_tile/metadata/dataset.json
+< ../raster/multi_tile/metadata/dataset_irregular.json
 
 ###
@@ -44,4 +49,50 @@
 
 GET http://localhost:3030/api/wms/{{workflow.response.body.$.id}}?REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image%2Fpng&STYLES=custom%3A{{colorizer}}&TRANSPARENT=true&layers={{workflow.response.body.$.id}}&time=2025-01-01T00%3A00%3A00.000Z&EXCEPTIONS=application%2Fjson&WIDTH=1800&HEIGHT=900&CRS=EPSG%3A4326&BBOX=-90%2C-180%2C90%2C180
 Authorization: Bearer {{anonymousSession.response.body.$.id}}
 
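+### List tiles. Illustrative sketch of the GET endpoint added in this PR;
+### offset and limit mirror GetDatasetTilesParams and may need adjusting.
+
+# @name tiles
+GET http://localhost:3030/api/dataset/{{dataset.response.body.$.datasetName}}/tiles?offset=0&limit=10
+Authorization: Bearer {{anonymousSession.response.body.$.id}}
+
+### Delete tiles. Illustrative sketch of the DELETE endpoint added in this PR;
+### the id is taken from the listing above and the body mirrors DeleteDatasetTiles.
+
+DELETE http://localhost:3030/api/dataset/{{dataset.response.body.$.datasetName}}/tiles
+Authorization: Bearer {{anonymousSession.response.body.$.id}}
+Content-Type: application/json
+
+{
+    "tileIds": ["{{tiles.response.body.$[0].id}}"]
+}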
+
+### Share with users
+
+PUT http://localhost:3030/api/permissions
+Authorization: Bearer {{anonymousSession.response.body.$.id}}
+Content-Type: application/json
+
+{
+    "resource": {
+        "type": "dataset",
+        "id": "{{dataset.response.body.$.datasetName}}"
+    },
+    "roleId": "4e8081b6-8aa6-4275-af0c-2fa2da557d28",
+    "permission": "Read"
+}
+
+### Share with anonymous
+
+PUT http://localhost:3030/api/permissions
+Authorization: Bearer {{anonymousSession.response.body.$.id}}
+Content-Type: application/json
+
+{
+    "resource": {
+        "type": "dataset",
+        "id": "{{dataset.response.body.$.datasetName}}"
+    },
+    "roleId": "fd8e87bf-515c-4f36-8da6-1a53702ff102",
+    "permission": "Read"
+}
\ No newline at end of file