From 15ffb68fb02193ad7d6fca5524352eba36b2365f Mon Sep 17 00:00:00 2001 From: Christian Beilschmidt Date: Thu, 6 Feb 2025 08:25:11 +0100 Subject: [PATCH] Python --- .generation/Dockerfile | 2 +- .generation/README.md | 2 +- .generation/input/openapi.json | 1217 ++-- python/.github/workflows/python.yml | 17 +- python/.gitlab-ci.yml | 6 +- python/.openapi-generator/FILES | 66 +- python/.openapi-generator/VERSION | 2 +- python/.travis.yml | 4 +- python/README.md | 46 +- python/geoengine_openapi_client/__init__.py | 22 +- .../api/datasets_api.py | 4535 ++++++++---- .../api/general_api.py | 618 +- .../api/layers_api.py | 6316 +++++++++++------ python/geoengine_openapi_client/api/ml_api.py | 978 ++- .../api/ogcwcs_api.py | 1319 +++- .../api/ogcwfs_api.py | 943 ++- .../api/ogcwms_api.py | 1375 ++-- .../api/permissions_api.py | 1038 ++- .../geoengine_openapi_client/api/plots_api.py | 410 +- .../api/projects_api.py | 2315 ++++-- .../api/session_api.py | 2242 ++++-- .../api/spatial_references_api.py | 334 +- .../geoengine_openapi_client/api/tasks_api.py | 1015 ++- .../api/uploads_api.py | 1010 ++- .../geoengine_openapi_client/api/user_api.py | 4423 ++++++++---- .../api/workflows_api.py | 2374 +++++-- python/geoengine_openapi_client/api_client.py | 615 +- .../geoengine_openapi_client/api_response.py | 28 +- .../geoengine_openapi_client/configuration.py | 214 +- python/geoengine_openapi_client/exceptions.py | 98 +- .../models/__init__.py | 22 +- .../models/add_dataset.py | 97 +- .../models/add_layer.py | 75 +- .../models/add_layer_collection.py | 63 +- .../models/add_role.py | 58 +- .../models/auth_code_request_url.py | 58 +- .../models/auth_code_response.py | 64 +- .../models/auto_create_dataset.py | 86 +- .../models/axis_order.py | 13 +- .../models/bounding_box2_d.py | 66 +- .../models/breakpoint.py | 61 +- .../models/classification_measurement.py | 66 +- .../models/collection_item.py | 52 +- .../models/collection_type.py | 13 +- .../models/color_param.py | 64 +- .../models/color_param_static.py | 67 +- .../models/colorizer.py | 56 +- .../models/computation_quota.py | 68 +- .../models/continuous_measurement.py | 68 +- .../models/coordinate2_d.py | 60 +- .../models/create_dataset.py | 64 +- .../models/create_project.py | 72 +- .../models/csv_header.py | 13 +- .../models/data_id.py | 52 +- .../models/data_path.py | 42 +- .../models/data_path_one_of.py | 58 +- .../models/data_path_one_of1.py | 58 +- .../models/data_usage.py | 72 +- .../models/data_usage_summary.py | 62 +- .../models/dataset.py | 100 +- .../models/dataset_definition.py | 64 +- .../models/dataset_listing.py | 82 +- .../models/dataset_resource.py | 64 +- .../models/derived_color.py | 68 +- .../models/derived_number.py | 72 +- .../models/describe_coverage_request.py | 13 +- .../models/error_response.py | 60 +- .../models/external_data_id.py | 70 +- .../models/feature_data_type.py | 13 +- .../models/file_not_found_handling.py | 13 +- .../models/format_specifics.py | 42 +- .../models/format_specifics_one_of.py | 60 +- .../models/format_specifics_one_of_csv.py | 58 +- .../models/gdal_dataset_geo_transform.py | 68 +- .../models/gdal_dataset_parameters.py | 125 +- .../gdal_loading_info_temporal_slice.py | 73 +- .../models/gdal_meta_data_list.py | 76 +- .../models/gdal_meta_data_regular.py | 99 +- .../models/gdal_meta_data_static.py | 81 +- .../models/gdal_metadata_mapping.py | 66 +- .../models/gdal_metadata_net_cdf_cf.py | 91 +- .../models/gdal_source_time_placeholder.py | 60 +- .../models/geo_json.py | 60 +- 
.../models/get_capabilities_format.py | 13 +- .../models/get_capabilities_request.py | 13 +- .../models/get_coverage_format.py | 13 +- .../models/get_coverage_request.py | 13 +- .../models/get_feature_request.py | 13 +- .../models/get_legend_graphic_request.py | 13 +- .../models/get_map_exception_format.py | 13 +- .../models/get_map_format.py | 13 +- .../models/get_map_request.py | 13 +- .../models/inline_object.py | 88 + .../models/inline_object1.py | 88 + .../models/inline_object2.py | 88 + .../models/internal_data_id.py | 68 +- .../geoengine_openapi_client/models/layer.py | 79 +- .../models/layer_collection.py | 87 +- .../models/layer_collection_listing.py | 77 +- .../models/layer_collection_resource.py | 64 +- .../models/layer_listing.py | 75 +- .../models/layer_resource.py | 66 +- .../models/layer_visibility.py | 60 +- .../models/line_symbology.py | 76 +- .../models/linear_gradient.py | 89 +- .../models/logarithmic_gradient.py | 87 +- .../models/measurement.py | 56 +- .../models/meta_data_definition.py | 108 +- .../models/meta_data_suggestion.py | 68 +- .../models/ml_model.py | 72 +- .../models/ml_model_metadata.py | 73 +- .../models/ml_model_name_response.py | 60 +- .../models/ml_model_resource.py | 64 +- .../mock_dataset_data_source_loading_info.py | 66 +- .../models/multi_band_raster_colorizer.py | 119 +- .../models/multi_line_string.py | 66 +- .../models/multi_point.py | 66 +- .../models/multi_polygon.py | 66 +- .../models/number_param.py | 52 +- .../models/ogr_source_column_spec.py | 85 +- .../models/ogr_source_dataset.py | 123 +- .../models/ogr_source_dataset_time_type.py | 60 +- .../ogr_source_dataset_time_type_none.py | 64 +- .../ogr_source_dataset_time_type_start.py | 76 +- ...source_dataset_time_type_start_duration.py | 76 +- .../ogr_source_dataset_time_type_start_end.py | 80 +- .../models/ogr_source_duration_spec.py | 56 +- .../ogr_source_duration_spec_infinite.py | 64 +- .../models/ogr_source_duration_spec_value.py | 67 +- .../models/ogr_source_duration_spec_zero.py | 62 +- .../models/ogr_source_error_spec.py | 13 +- .../models/ogr_source_time_format.py | 140 +- .../models/ogr_source_time_format_one_of.py | 97 + .../models/ogr_source_time_format_one_of1.py | 98 + .../models/ogr_source_time_format_one_of2.py | 95 + .../models/operator_quota.py | 67 +- .../models/order_by.py | 13 +- .../models/palette_colorizer.py | 85 +- .../models/permission.py | 13 +- .../models/permission_list_options.py | 61 +- .../models/permission_listing.py | 66 +- .../models/permission_request.py | 68 +- .../geoengine_openapi_client/models/plot.py | 60 +- .../models/plot_output_format.py | 13 +- .../models/plot_result_descriptor.py | 74 +- .../models/point_symbology.py | 80 +- .../models/polygon_symbology.py | 80 +- .../models/project.py | 96 +- .../models/project_layer.py | 68 +- .../models/project_listing.py | 73 +- .../models/project_resource.py | 64 +- .../models/project_update_token.py | 13 +- .../models/project_version.py | 59 +- .../models/provenance.py | 62 +- .../models/provenance_entry.py | 70 +- .../models/provenance_output.py | 74 +- .../models/provenances.py | 66 +- .../models/provider_capabilities.py | 62 +- .../models/provider_layer_collection_id.py | 64 +- .../models/provider_layer_id.py | 64 +- .../models/query_rectangle.py | 104 + .../geoengine_openapi_client/models/quota.py | 61 +- .../models/raster_band_descriptor.py | 62 +- .../models/raster_colorizer.py | 52 +- .../models/raster_data_type.py | 13 +- .../models/raster_dataset_from_workflow.py | 83 +- 
.../raster_dataset_from_workflow_result.py | 62 +- .../models/raster_properties_entry_type.py | 13 +- .../models/raster_properties_key.py | 62 +- .../models/raster_result_descriptor.py | 96 +- .../raster_stream_websocket_result_type.py | 13 +- .../models/raster_symbology.py | 70 +- .../models/resource.py | 80 +- .../models/resource_id.py | 44 +- .../models/resource_id_dataset_id.py | 64 +- .../models/resource_id_layer.py | 66 +- .../models/resource_id_layer_collection.py | 64 +- .../models/resource_id_ml_model.py | 64 +- .../models/resource_id_project.py | 64 +- .../geoengine_openapi_client/models/role.py | 60 +- .../models/role_description.py | 62 +- .../models/search_capabilities.py | 68 +- .../models/search_type.py | 13 +- .../models/search_types.py | 60 +- .../models/server_info.py | 68 +- .../models/single_band_raster_colorizer.py | 71 +- .../models/spatial_partition2_d.py | 66 +- .../models/spatial_reference_authority.py | 13 +- .../models/spatial_reference_specification.py | 92 +- .../models/spatial_resolution.py | 62 +- .../models/st_rectangle.py | 68 +- .../models/static_meta_data.py | 107 + .../models/static_number_param.py | 67 +- .../models/stroke_param.py | 64 +- .../models/suggest_meta_data.py | 76 +- .../models/symbology.py | 60 +- .../models/task_abort_options.py | 56 +- .../models/task_filter.py | 13 +- .../models/task_list_options.py | 65 +- .../models/task_response.py | 62 +- .../models/task_status.py | 70 +- .../models/task_status_aborted.py | 70 +- .../models/task_status_completed.py | 78 +- .../models/task_status_failed.py | 76 +- .../models/task_status_running.py | 82 +- .../models/task_status_with_id.py | 85 +- .../models/text_symbology.py | 66 +- .../models/time_granularity.py | 13 +- .../models/time_interval.py | 62 +- .../models/time_reference.py | 13 +- .../models/time_step.py | 61 +- .../models/typed_geometry.py | 42 +- .../models/typed_geometry_one_of.py | 64 +- .../models/typed_geometry_one_of1.py | 60 +- .../models/typed_geometry_one_of2.py | 60 +- .../models/typed_geometry_one_of3.py | 60 +- .../models/typed_operator.py | 68 +- .../models/typed_operator_operator.py | 58 +- .../models/typed_plot_result_descriptor.py | 82 +- .../models/typed_raster_result_descriptor.py | 102 +- .../models/typed_result_descriptor.py | 56 +- .../models/typed_vector_result_descriptor.py | 92 +- .../models/unitless_measurement.py | 64 +- .../models/unix_time_stamp_type.py | 13 +- .../models/update_dataset.py | 64 +- .../models/update_layer.py | 75 +- .../models/update_layer_collection.py | 63 +- .../models/update_project.py | 111 +- .../models/update_quota.py | 58 +- .../models/upload_file_layers_response.py | 58 +- .../models/upload_files_response.py | 58 +- .../models/usage_summary_granularity.py | 13 +- .../models/user_credentials.py | 60 +- .../models/user_info.py | 70 +- .../models/user_registration.py | 64 +- .../models/user_session.py | 80 +- .../models/vec_update.py | 138 + .../models/vector_column_info.py | 64 +- .../models/vector_data_type.py | 13 +- .../models/vector_result_descriptor.py | 86 +- .../geoengine_openapi_client/models/volume.py | 63 +- .../models/volume_file_layers_response.py | 58 +- .../models/wcs_boundingbox.py | 62 +- .../models/wcs_service.py | 13 +- .../models/wcs_version.py | 13 +- .../models/wfs_service.py | 13 +- .../models/wfs_version.py | 13 +- .../models/wms_service.py | 13 +- .../models/wms_version.py | 13 +- .../models/workflow.py | 66 +- .../models/wrapped_plot_output.py | 66 +- python/geoengine_openapi_client/rest.py | 294 +- 
python/pyproject.toml | 75 +- python/requirements.txt | 9 +- python/setup.py | 12 +- python/test-requirements.txt | 9 +- python/test/test_add_dataset.py | 7 +- python/test/test_add_layer.py | 7 +- python/test/test_add_layer_collection.py | 7 +- python/test/test_add_role.py | 7 +- python/test/test_auth_code_request_url.py | 7 +- python/test/test_auth_code_response.py | 7 +- python/test/test_auto_create_dataset.py | 7 +- python/test/test_axis_order.py | 3 +- python/test/test_bounding_box2_d.py | 7 +- python/test/test_breakpoint.py | 7 +- .../test/test_classification_measurement.py | 7 +- python/test/test_collection_item.py | 7 +- python/test/test_collection_type.py | 3 +- python/test/test_color_param.py | 7 +- python/test/test_color_param_static.py | 7 +- python/test/test_colorizer.py | 7 +- python/test/test_computation_quota.py | 7 +- python/test/test_continuous_measurement.py | 7 +- python/test/test_coordinate2_d.py | 7 +- python/test/test_create_dataset.py | 11 +- python/test/test_create_project.py | 7 +- python/test/test_csv_header.py | 3 +- python/test/test_data_id.py | 7 +- python/test/test_data_path.py | 7 +- python/test/test_data_path_one_of.py | 7 +- python/test/test_data_path_one_of1.py | 7 +- python/test/test_data_usage.py | 7 +- python/test/test_data_usage_summary.py | 7 +- python/test/test_dataset.py | 7 +- python/test/test_dataset_definition.py | 11 +- python/test/test_dataset_listing.py | 7 +- python/test/test_dataset_resource.py | 7 +- python/test/test_datasets_api.py | 28 +- python/test/test_derived_color.py | 7 +- python/test/test_derived_number.py | 7 +- python/test/test_describe_coverage_request.py | 3 +- python/test/test_error_response.py | 7 +- python/test/test_external_data_id.py | 7 +- python/test/test_feature_data_type.py | 3 +- python/test/test_file_not_found_handling.py | 3 +- python/test/test_format_specifics.py | 7 +- python/test/test_format_specifics_one_of.py | 7 +- .../test/test_format_specifics_one_of_csv.py | 7 +- .../test/test_gdal_dataset_geo_transform.py | 7 +- python/test/test_gdal_dataset_parameters.py | 7 +- .../test_gdal_loading_info_temporal_slice.py | 7 +- python/test/test_gdal_meta_data_list.py | 105 +- python/test/test_gdal_meta_data_regular.py | 39 +- python/test/test_gdal_meta_data_static.py | 39 +- python/test/test_gdal_metadata_mapping.py | 7 +- python/test/test_gdal_metadata_net_cdf_cf.py | 39 +- .../test/test_gdal_source_time_placeholder.py | 7 +- python/test/test_general_api.py | 8 +- python/test/test_geo_json.py | 7 +- python/test/test_get_capabilities_format.py | 3 +- python/test/test_get_capabilities_request.py | 3 +- python/test/test_get_coverage_format.py | 3 +- python/test/test_get_coverage_request.py | 3 +- python/test/test_get_feature_request.py | 3 +- .../test/test_get_legend_graphic_request.py | 3 +- python/test/test_get_map_exception_format.py | 3 +- python/test/test_get_map_format.py | 3 +- python/test/test_get_map_request.py | 3 +- ...est_date_time.py => test_inline_object.py} | 29 +- ...200_response.py => test_inline_object1.py} | 25 +- ...200_response.py => test_inline_object2.py} | 25 +- python/test/test_internal_data_id.py | 7 +- python/test/test_layer.py | 7 +- python/test/test_layer_collection.py | 7 +- python/test/test_layer_collection_listing.py | 7 +- python/test/test_layer_collection_resource.py | 7 +- python/test/test_layer_listing.py | 7 +- python/test/test_layer_resource.py | 7 +- python/test/test_layer_update.py | 64 - python/test/test_layer_visibility.py | 7 +- python/test/test_layers_api.py | 38 +- 
python/test/test_line_symbology.py | 7 +- python/test/test_linear_gradient.py | 7 +- python/test/test_logarithmic_gradient.py | 7 +- python/test/test_measurement.py | 7 +- python/test/test_meta_data_definition.py | 93 +- python/test/test_meta_data_suggestion.py | 7 +- python/test/test_ml_api.py | 10 +- python/test/test_ml_model.py | 7 +- python/test/test_ml_model_metadata.py | 7 +- python/test/test_ml_model_name_response.py | 7 +- python/test/test_ml_model_resource.py | 7 +- ...t_mock_dataset_data_source_loading_info.py | 7 +- python/test/test_mock_meta_data.py | 86 - .../test/test_multi_band_raster_colorizer.py | 7 +- python/test/test_multi_line_string.py | 7 +- python/test/test_multi_point.py | 7 +- python/test/test_multi_polygon.py | 7 +- python/test/test_number_param.py | 7 +- python/test/test_ogcwcs_api.py | 10 +- python/test/test_ogcwfs_api.py | 8 +- python/test/test_ogcwms_api.py | 10 +- python/test/test_ogr_meta_data.py | 100 - python/test/test_ogr_source_column_spec.py | 9 +- python/test/test_ogr_source_dataset.py | 7 +- .../test/test_ogr_source_dataset_time_type.py | 7 +- .../test_ogr_source_dataset_time_type_none.py | 7 +- ...test_ogr_source_dataset_time_type_start.py | 7 +- ...source_dataset_time_type_start_duration.py | 7 +- ..._ogr_source_dataset_time_type_start_end.py | 7 +- python/test/test_ogr_source_duration_spec.py | 7 +- .../test_ogr_source_duration_spec_infinite.py | 7 +- .../test_ogr_source_duration_spec_value.py | 7 +- .../test_ogr_source_duration_spec_zero.py | 7 +- python/test/test_ogr_source_error_spec.py | 3 +- python/test/test_ogr_source_time_format.py | 7 +- ... => test_ogr_source_time_format_one_of.py} | 25 +- ...=> test_ogr_source_time_format_one_of1.py} | 25 +- ...=> test_ogr_source_time_format_one_of2.py} | 25 +- python/test/test_operator_quota.py | 7 +- python/test/test_order_by.py | 3 +- python/test/test_palette_colorizer.py | 7 +- python/test/test_permission.py | 3 +- python/test/test_permission_list_options.py | 7 +- python/test/test_permission_listing.py | 7 +- python/test/test_permission_request.py | 7 +- python/test/test_permissions_api.py | 10 +- python/test/test_plot.py | 7 +- python/test/test_plot_output_format.py | 3 +- python/test/test_plot_query_rectangle.py | 78 - python/test/test_plot_result_descriptor.py | 7 +- python/test/test_plots_api.py | 6 +- python/test/test_point_symbology.py | 7 +- python/test/test_polygon_symbology.py | 7 +- python/test/test_project.py | 7 +- python/test/test_project_layer.py | 7 +- python/test/test_project_listing.py | 7 +- python/test/test_project_resource.py | 7 +- python/test/test_project_update_token.py | 3 +- python/test/test_project_version.py | 7 +- python/test/test_projects_api.py | 18 +- python/test/test_provenance.py | 7 +- python/test/test_provenance_entry.py | 7 +- python/test/test_provenance_output.py | 7 +- python/test/test_provenances.py | 7 +- python/test/test_provider_capabilities.py | 7 +- .../test/test_provider_layer_collection_id.py | 7 +- python/test/test_provider_layer_id.py | 7 +- ...y_rectangle.py => test_query_rectangle.py} | 25 +- python/test/test_quota.py | 7 +- python/test/test_raster_band_descriptor.py | 7 +- python/test/test_raster_colorizer.py | 7 +- python/test/test_raster_data_type.py | 3 +- .../test/test_raster_dataset_from_workflow.py | 11 +- ...est_raster_dataset_from_workflow_result.py | 7 +- .../test/test_raster_properties_entry_type.py | 3 +- python/test/test_raster_properties_key.py | 7 +- python/test/test_raster_result_descriptor.py | 7 +- 
...est_raster_stream_websocket_result_type.py | 3 +- python/test/test_raster_symbology.py | 7 +- python/test/test_resource.py | 7 +- python/test/test_resource_id.py | 7 +- python/test/test_resource_id_dataset_id.py | 7 +- python/test/test_resource_id_layer.py | 7 +- .../test/test_resource_id_layer_collection.py | 7 +- python/test/test_resource_id_ml_model.py | 7 +- python/test/test_resource_id_project.py | 7 +- python/test/test_role.py | 7 +- python/test/test_role_description.py | 7 +- python/test/test_search_capabilities.py | 7 +- python/test/test_search_type.py | 3 +- python/test/test_search_types.py | 7 +- python/test/test_server_info.py | 7 +- python/test/test_session_api.py | 18 +- .../test/test_single_band_raster_colorizer.py | 7 +- python/test/test_spatial_partition2_d.py | 7 +- .../test/test_spatial_reference_authority.py | 3 +- .../test_spatial_reference_specification.py | 7 +- python/test/test_spatial_references_api.py | 4 +- python/test/test_spatial_resolution.py | 7 +- python/test/test_st_rectangle.py | 7 +- python/test/test_static_meta_data.py | 157 + python/test/test_static_number_param.py | 7 +- python/test/test_stroke_param.py | 7 +- python/test/test_suggest_meta_data.py | 7 +- python/test/test_symbology.py | 7 +- python/test/test_task_abort_options.py | 7 +- python/test/test_task_filter.py | 3 +- python/test/test_task_list_options.py | 7 +- python/test/test_task_response.py | 7 +- python/test/test_task_status.py | 7 +- python/test/test_task_status_aborted.py | 7 +- python/test/test_task_status_completed.py | 7 +- python/test/test_task_status_failed.py | 7 +- python/test/test_task_status_running.py | 7 +- python/test/test_task_status_with_id.py | 7 +- python/test/test_tasks_api.py | 10 +- python/test/test_text_symbology.py | 7 +- python/test/test_time_granularity.py | 3 +- python/test/test_time_interval.py | 7 +- python/test/test_time_reference.py | 3 +- python/test/test_time_step.py | 7 +- python/test/test_typed_geometry.py | 7 +- python/test/test_typed_geometry_one_of.py | 7 +- python/test/test_typed_geometry_one_of1.py | 7 +- python/test/test_typed_geometry_one_of2.py | 7 +- python/test/test_typed_geometry_one_of3.py | 7 +- python/test/test_typed_operator.py | 7 +- python/test/test_typed_operator_operator.py | 7 +- .../test/test_typed_plot_result_descriptor.py | 7 +- .../test_typed_raster_result_descriptor.py | 7 +- python/test/test_typed_result_descriptor.py | 7 +- .../test_typed_vector_result_descriptor.py | 7 +- python/test/test_unitless_measurement.py | 7 +- python/test/test_unix_time_stamp_type.py | 3 +- python/test/test_update_dataset.py | 7 +- python/test/test_update_layer.py | 7 +- python/test/test_update_layer_collection.py | 7 +- python/test/test_update_project.py | 7 +- python/test/test_update_quota.py | 7 +- .../test/test_upload_file_layers_response.py | 7 +- python/test/test_upload_files_response.py | 7 +- python/test/test_uploads_api.py | 10 +- python/test/test_usage_summary_granularity.py | 3 +- python/test/test_user_api.py | 30 +- python/test/test_user_credentials.py | 7 +- python/test/test_user_info.py | 7 +- python/test/test_user_registration.py | 7 +- python/test/test_user_session.py | 7 +- ...test_plot_update.py => test_vec_update.py} | 25 +- python/test/test_vector_column_info.py | 7 +- python/test/test_vector_data_type.py | 3 +- python/test/test_vector_query_rectangle.py | 78 - python/test/test_vector_result_descriptor.py | 7 +- python/test/test_volume.py | 8 +- .../test/test_volume_file_layers_response.py | 7 +- python/test/test_wcs_boundingbox.py 
| 7 +- python/test/test_wcs_service.py | 3 +- python/test/test_wcs_version.py | 3 +- python/test/test_wfs_service.py | 3 +- python/test/test_wfs_version.py | 3 +- python/test/test_wms_service.py | 3 +- python/test/test_wms_version.py | 3 +- python/test/test_workflow.py | 7 +- python/test/test_workflows_api.py | 18 +- python/test/test_wrapped_plot_output.py | 7 +- 497 files changed, 32543 insertions(+), 17606 deletions(-) create mode 100644 python/geoengine_openapi_client/models/inline_object.py create mode 100644 python/geoengine_openapi_client/models/inline_object1.py create mode 100644 python/geoengine_openapi_client/models/inline_object2.py create mode 100644 python/geoengine_openapi_client/models/ogr_source_time_format_one_of.py create mode 100644 python/geoengine_openapi_client/models/ogr_source_time_format_one_of1.py create mode 100644 python/geoengine_openapi_client/models/ogr_source_time_format_one_of2.py create mode 100644 python/geoengine_openapi_client/models/query_rectangle.py create mode 100644 python/geoengine_openapi_client/models/static_meta_data.py create mode 100644 python/geoengine_openapi_client/models/vec_update.py rename python/test/{test_date_time.py => test_inline_object.py} (53%) rename python/test/{test_create_dataset_handler200_response.py => test_inline_object1.py} (52%) rename python/test/{test_add_collection200_response.py => test_inline_object2.py} (54%) delete mode 100644 python/test/test_layer_update.py delete mode 100644 python/test/test_mock_meta_data.py delete mode 100644 python/test/test_ogr_meta_data.py rename python/test/{test_ogr_source_time_format_custom.py => test_ogr_source_time_format_one_of.py} (64%) rename python/test/{test_ogr_source_time_format_unix_time_stamp.py => test_ogr_source_time_format_one_of1.py} (62%) rename python/test/{test_ogr_source_time_format_auto.py => test_ogr_source_time_format_one_of2.py} (63%) delete mode 100644 python/test/test_plot_query_rectangle.py rename python/test/{test_raster_query_rectangle.py => test_query_rectangle.py} (78%) create mode 100644 python/test/test_static_meta_data.py rename python/test/{test_plot_update.py => test_vec_update.py} (63%) delete mode 100644 python/test/test_vector_query_rectangle.py diff --git a/.generation/Dockerfile b/.generation/Dockerfile index 16b82c7b..ff7fb287 100644 --- a/.generation/Dockerfile +++ b/.generation/Dockerfile @@ -1,5 +1,5 @@ # Patched version of openapi-generator-cli with python3 support -FROM docker.io/openapitools/openapi-generator-cli:v7.0.1 +FROM docker.io/openapitools/openapi-generator-cli:v7.11.0 RUN apt-get update && apt-get install -y python3 diff --git a/.generation/README.md b/.generation/README.md index 0ba2770b..b294658e 100644 --- a/.generation/README.md +++ b/.generation/README.md @@ -26,7 +26,7 @@ From the root of the repository run: To fetch the OpenAPI spec from the backend, run: ```bash -cargo run --features pro +cargo run wget http://localhost:3030/api/api-docs/openapi.json -O - \ | python -m json.tool --indent 2 > .generation/input/openapi.json ``` diff --git a/.generation/input/openapi.json b/.generation/input/openapi.json index ffd9853d..811acf5a 100644 --- a/.generation/input/openapi.json +++ b/.generation/input/openapi.json @@ -1,5 +1,5 @@ { - "openapi": "3.0.3", + "openapi": "3.1.0", "info": { "title": "Geo Engine API", "description": "", @@ -15,7 +15,12 @@ }, "servers": [ { - "url": "http://127.0.0.1:3030/api" + "url": "{server}/api", + "variables": { + "server": { + "default": "https://geoengine.io" + } + } } ], "paths": { @@ -75,8 +80,7 
@@ "tags": [ "Datasets" ], - "summary": "Creates a new dataset referencing files.", - "description": "Users can reference previously uploaded files.\nAdmins can reference files from a volume.", + "summary": "Creates a new dataset referencing files.\nUsers can reference previously uploaded files.\nAdmins can reference files from a volume.", "operationId": "create_dataset_handler", "requestBody": { "content": { @@ -105,8 +109,7 @@ "tags": [ "Datasets" ], - "summary": "Creates a new dataset using previously uploaded files.", - "description": "The format of the files will be automatically detected when possible.", + "summary": "Creates a new dataset using previously uploaded files.\nThe format of the files will be automatically detected when possible.", "operationId": "auto_create_dataset_handler", "requestBody": { "content": { @@ -198,8 +201,7 @@ "tags": [ "Datasets" ], - "summary": "Inspects an upload and suggests metadata that can be used when creating a new dataset based on it.", - "description": "Tries to automatically detect the main file and layer name if not specified.", + "summary": "Inspects an upload and suggests metadata that can be used when creating a new dataset based on it.\nTries to automatically detect the main file and layer name if not specified.", "operationId": "suggest_meta_data_handler", "requestBody": { "content": { @@ -794,8 +796,7 @@ "tags": [ "Workflows" ], - "summary": "Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body.", - "description": "Returns the id of the created task", + "summary": "Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body.\nReturns the id of the created task", "operationId": "dataset_from_workflow_handler", "parameters": [ { @@ -853,8 +854,7 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": "string" }, "example": "Germany" }, @@ -897,8 +897,7 @@ "type": "array", "items": { "type": "string" - }, - "nullable": true + } }, "example": "['tag1', 'tag2']" } @@ -1813,7 +1812,7 @@ "description": "Layer id", "required": true, "schema": { - "$ref": "#/components/schemas/LayerId" + "$ref": "#/components/schemas/LayerCollectionId" } } ], @@ -2004,7 +2003,7 @@ "description": "Layer id", "required": true, "schema": { - "$ref": "#/components/schemas/LayerId" + "$ref": "#/components/schemas/LayerCollectionId" } } ], @@ -2053,7 +2052,7 @@ "description": "Layer id", "required": true, "schema": { - "$ref": "#/components/schemas/LayerId" + "$ref": "#/components/schemas/LayerCollectionId" } } ], @@ -2241,7 +2240,7 @@ "Session" ], "summary": "Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider.", - "description": "# Errors\n\nThis call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable.\n", + "description": "# Errors\n\nThis call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable.\n\n", "operationId": "oidc_init", "responses": { "200": { @@ -2265,8 +2264,8 @@ "tags": [ "Session" ], - "summary": "Creates a session for a user via a login with Open Id Connect.", - "description": "This call must be preceded by a call to oidcInit and match the parameters of that call.\n\n# Errors\n\nThis call fails if the [`AuthCodeResponse`] is invalid,\nif a previous oidcLogin call with the same state was already successfully or unsuccessfully 
resolved,\nif the Open Id Connect configuration is invalid,\nor if the Id Provider is unreachable.\n", + "summary": "Creates a session for a user via a login with Open Id Connect.\nThis call must be preceded by a call to oidcInit and match the parameters of that call.", + "description": "# Errors\n\nThis call fails if the [`AuthCodeResponse`] is invalid,\nif a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved,\nif the Open Id Connect configuration is invalid,\nor if the Id Provider is unreachable.\n\n", "operationId": "oidc_login", "requestBody": { "content": { @@ -2468,8 +2467,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "example": "EPSG:4326" }, @@ -2665,8 +2666,7 @@ "tags": [ "Projects" ], - "summary": "Updates a project.", - "description": "This will create a new version.", + "summary": "Updates a project.\nThis will create a new version.", "operationId": "update_project_handler", "parameters": [ { @@ -3116,8 +3116,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } ], @@ -3432,12 +3434,14 @@ "in": "path", "required": true, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TaskFilter" } - ], - "nullable": true + ] } }, { @@ -3502,7 +3506,7 @@ "Tasks" ], "summary": "Abort a running task.", - "description": "# Parameters\n\n* `force` - If true, the task will be aborted without clean-up.\nYou can abort a task that is already in the process of aborting.", + "description": "# Parameters\n\n* `force` - If true, the task will be aborted without clean-up.\n You can abort a task that is already in the process of aborting.", "operationId": "abort_handler", "parameters": [ { @@ -3567,7 +3571,7 @@ "description": "Demo", "pctComplete": "0.00%", "timeStarted": "2023-02-16T15:25:45.390Z", - "estimatedTimeRemaining": "? (\u00b1 ?)", + "estimatedTimeRemaining": "? 
(± ?)", "info": null } } @@ -3964,12 +3968,14 @@ "in": "query", "required": false, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/WcsVersion" } - ], - "nullable": true + ] } }, { @@ -4115,9 +4121,11 @@ "in": "query", "required": false, "schema": { - "type": "number", - "format": "double", - "nullable": true + "type": [ + "number", + "null" + ], + "format": "double" } }, { @@ -4125,9 +4133,11 @@ "in": "query", "required": false, "schema": { - "type": "number", - "format": "double", - "nullable": true + "type": [ + "number", + "null" + ], + "format": "double" } }, { @@ -4135,9 +4145,11 @@ "in": "query", "required": false, "schema": { - "type": "number", - "format": "double", - "nullable": true + "type": [ + "number", + "null" + ], + "format": "double" } } ], @@ -4175,12 +4187,14 @@ "in": "path", "required": true, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/WfsVersion" } - ], - "nullable": true + ] } }, { @@ -4241,12 +4255,14 @@ "in": "query", "required": false, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/WfsVersion" } - ], - "nullable": true + ] } }, { @@ -4297,8 +4313,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "example": "EPSG:4326" }, @@ -4307,8 +4325,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4316,9 +4336,11 @@ "in": "query", "required": false, "schema": { - "type": "integer", + "type": [ + "integer", + "null" + ], "format": "int64", - "nullable": true, "minimum": 0 } }, @@ -4327,8 +4349,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4336,8 +4360,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4345,8 +4371,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4354,8 +4382,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4364,12 +4394,14 @@ "description": "Vendor parameter for specifying a spatial query resolution", "required": false, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/WfsResolution" } - ], - "nullable": true + ] } } ], @@ -4509,12 +4541,14 @@ "in": "path", "required": true, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/WmsVersion" } - ], - "nullable": true + ] } }, { @@ -4538,12 +4572,14 @@ "in": "path", "required": true, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/GetCapabilitiesFormat" } - ], - "nullable": true + ] } } ], @@ -4723,8 +4759,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "example": "EPSG:4326" }, @@ -4751,8 +4789,10 @@ "in": "query", "required": false, "schema": { - "type": "boolean", - "nullable": true + "type": [ + "boolean", + "null" + ] } }, { @@ -4760,8 +4800,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4769,8 +4811,10 @@ "in": "query", "required": 
false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4778,8 +4822,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4787,8 +4833,10 @@ "in": "query", "required": false, "schema": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } }, { @@ -4796,12 +4844,14 @@ "in": "query", "required": false, "schema": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/GetMapExceptionFormat" } - ], - "nullable": true + ] } } ], @@ -5167,37 +5217,45 @@ "type": "string" }, "name": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/DatasetName" } - ], - "nullable": true + ] }, "provenance": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "$ref": "#/components/schemas/Provenance" - }, - "nullable": true + } }, "sourceOperator": { "type": "string" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "tags": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "type": "string" - }, - "nullable": true + } } } }, @@ -5218,6 +5276,9 @@ "description": "metadata used for loading the data", "additionalProperties": { "type": "string" + }, + "propertyNames": { + "type": "string" } }, "name": { @@ -5232,12 +5293,14 @@ "description": "properties, for instance, to be rendered in the UI" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "workflow": { "$ref": "#/components/schemas/Workflow" @@ -5325,18 +5388,22 @@ "type": "string" }, "layerName": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "mainFile": { "type": "string" }, "tags": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "type": "string" - }, - "nullable": true + } }, "upload": { "$ref": "#/components/schemas/UploadId" @@ -5352,14 +5419,6 @@ ] } }, - "AxisLabels": { - "type": "array", - "items": { - "type": "string" - }, - "maxItems": 2, - "minItems": 2 - }, "AxisOrder": { "type": "string", "enum": [ @@ -5422,6 +5481,11 @@ "type": "object", "additionalProperties": { "type": "string" + }, + "propertyNames": { + "type": "integer", + "format": "int32", + "minimum": 0 } }, "measurement": { @@ -5559,8 +5623,10 @@ ] }, "unit": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } }, @@ -5614,12 +5680,14 @@ "type": "string" }, "timeStep": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeStep" } - ], - "nullable": true + ] } }, "example": { @@ -5781,11 +5849,13 @@ "$ref": "#/components/schemas/DatasetName" }, "provenance": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "$ref": "#/components/schemas/Provenance" - }, - "nullable": true + } }, "resultDescriptor": { "$ref": "#/components/schemas/TypedResultDescriptor" @@ -5794,19 +5864,23 @@ "type": "string" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "tags": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "type": "string" - }, - "nullable": true + } } } }, @@ -5860,12 +5934,14 @@ "type": "string" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": 
"#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "tags": { "type": "array", @@ -5882,8 +5958,8 @@ "type": "object", "title": "DatasetResource", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -5897,22 +5973,12 @@ } } }, - "DateTime": { - "type": "object", - "description": "An object that composes the date and a timestamp with time zone.", - "required": [ - "datetime" - ], - "properties": { - "datetime": { - "type": "string", - "format": "date-time" - } - } - }, "DateTimeParseFormat": { "type": "string" }, + "DateTimeString": { + "type": "string" + }, "DerivedColor": { "type": "object", "required": [ @@ -6047,14 +6113,6 @@ } ] }, - "GdalConfigOption": { - "type": "array", - "items": { - "type": "string" - }, - "maxItems": 2, - "minItems": 2 - }, "GdalDatasetGeoTransform": { "type": "object", "required": [ @@ -6098,18 +6156,22 @@ "type": "string" }, "gdalConfigOptions": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { - "$ref": "#/components/schemas/GdalConfigOption" - }, - "nullable": true + "$ref": "#/components/schemas/StringPair" + } }, "gdalOpenOptions": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "type": "string" - }, - "nullable": true + } }, "geoTransform": { "$ref": "#/components/schemas/GdalDatasetGeoTransform" @@ -6119,16 +6181,20 @@ "minimum": 0 }, "noDataValue": { - "type": "number", - "format": "double", - "nullable": true + "type": [ + "number", + "null" + ], + "format": "double" }, "propertiesMapping": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "$ref": "#/components/schemas/GdalMetadataMapping" - }, - "nullable": true + } }, "rasterbandChannel": { "type": "integer", @@ -6151,12 +6217,14 @@ "$ref": "#/components/schemas/CacheTtlSeconds" }, "params": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/GdalDatasetParameters" } - ], - "nullable": true + ] }, "time": { "$ref": "#/components/schemas/TimeInterval" @@ -6218,6 +6286,9 @@ "type": "object", "additionalProperties": { "$ref": "#/components/schemas/GdalSourceTimePlaceholder" + }, + "propertyNames": { + "type": "string" } }, "type": { @@ -6246,12 +6317,14 @@ "$ref": "#/components/schemas/RasterResultDescriptor" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -6302,7 +6375,8 @@ "$ref": "#/components/schemas/CacheTtlSeconds" }, "end": { - "$ref": "#/components/schemas/TimeInstance" + "$ref": "#/components/schemas/TimeInstance", + "description": "We use the end to specify the last, non-inclusive valid time point.\nQueries behind this point return no data.\nTODO: Alternatively, we could think about using the number of possible time steps in the future." 
}, "params": { "$ref": "#/components/schemas/GdalDatasetParameters" @@ -6449,6 +6523,9 @@ "description": "metadata used for loading the data", "additionalProperties": { "type": "string" + }, + "propertyNames": { + "type": "string" } }, "name": { @@ -6462,12 +6539,14 @@ "description": "properties, for instance, to be rendered in the UI" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "workflow": { "$ref": "#/components/schemas/Workflow" @@ -6488,9 +6567,11 @@ "type": "string" }, "entryLabel": { - "type": "string", - "description": "a common label for the collection's entries, if there is any", - "nullable": true + "type": [ + "string", + "null" + ], + "description": "a common label for the collection's entries, if there is any" }, "id": { "$ref": "#/components/schemas/ProviderLayerCollectionId" @@ -6551,8 +6632,8 @@ "type": "object", "title": "LayerCollectionResource", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -6606,8 +6687,8 @@ "type": "object", "title": "LayerResource", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -6621,16 +6702,6 @@ } } }, - "LayerUpdate": { - "oneOf": [ - { - "$ref": "#/components/schemas/ProjectUpdateToken" - }, - { - "$ref": "#/components/schemas/ProjectLayer" - } - ] - }, "LayerVisibility": { "type": "object", "required": [ @@ -6661,12 +6732,14 @@ "$ref": "#/components/schemas/StrokeParam" }, "text": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TextSymbology" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -6766,10 +6839,10 @@ "MetaDataDefinition": { "oneOf": [ { - "$ref": "#/components/schemas/MockMetaData" + "$ref": "#/components/schemas/StaticMetaData" }, { - "$ref": "#/components/schemas/OgrMetaData" + "$ref": "#/components/schemas/StaticMetaData" }, { "$ref": "#/components/schemas/GdalMetaDataRegular" @@ -6791,8 +6864,8 @@ "GdalMetaDataRegular": "#/components/schemas/GdalMetaDataRegular", "GdalMetadataNetCdfCf": "#/components/schemas/GdalMetadataNetCdfCf", "GdalStatic": "#/components/schemas/GdalMetaDataStatic", - "MockMetaData": "#/components/schemas/MockMetaData", - "OgrMetaData": "#/components/schemas/OgrMetaData" + "MockMetaData": "#/components/schemas/StaticMetaData", + "OgrMetaData": "#/components/schemas/StaticMetaData" } } }, @@ -6889,12 +6962,12 @@ "type": "object", "title": "MlModelResource", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { - "$ref": "#/components/schemas/MlModelName" + "type": "string" }, "type": { "type": "string", @@ -6918,28 +6991,6 @@ } } }, - "MockMetaData": { - "type": "object", - "required": [ - "loadingInfo", - "resultDescriptor", - "type" - ], - "properties": { - "loadingInfo": { - "$ref": "#/components/schemas/MockDatasetDataSourceLoadingInfo" - }, - "resultDescriptor": { - "$ref": "#/components/schemas/VectorResultDescriptor" - }, - "type": { - "type": "string", - "enum": [ - "MockMetaData" - ] - } - } - }, "MultiBandRasterColorizer": { "type": "object", "title": "MultiBandRasterColorizer", @@ -6999,7 +7050,8 @@ "description": "A scaling factor for the green channel between 0 and 1." }, "noDataColor": { - "$ref": "#/components/schemas/RgbaColor" + "$ref": "#/components/schemas/RgbaColor", + "description": "The color to use for no data values.\nIf not specified, the no data values will be transparent." 
}, "redBand": { "type": "integer", @@ -7085,8 +7137,7 @@ "type": "string" }, "NoGeometry": { - "default": null, - "nullable": true + "default": null }, "NumberParam": { "oneOf": [ @@ -7108,28 +7159,6 @@ "OgcBoundingBox": { "type": "string" }, - "OgrMetaData": { - "type": "object", - "required": [ - "loadingInfo", - "resultDescriptor", - "type" - ], - "properties": { - "loadingInfo": { - "$ref": "#/components/schemas/OgrSourceDataset" - }, - "resultDescriptor": { - "$ref": "#/components/schemas/VectorResultDescriptor" - }, - "type": { - "type": "string", - "enum": [ - "OgrMetaData" - ] - } - } - }, "OgrSourceColumnSpec": { "type": "object", "required": [ @@ -7155,12 +7184,14 @@ } }, "formatSpecifics": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/FormatSpecifics" } - ], - "nullable": true + ] }, "int": { "type": "array", @@ -7169,11 +7200,16 @@ } }, "rename": { - "type": "object", + "type": [ + "object", + "null" + ], "additionalProperties": { "type": "string" }, - "nullable": true + "propertyNames": { + "type": "string" + } }, "text": { "type": "array", @@ -7185,8 +7221,10 @@ "type": "string" }, "y": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } }, @@ -7199,35 +7237,43 @@ ], "properties": { "attributeQuery": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "cacheTtl": { "$ref": "#/components/schemas/CacheTtlSeconds" }, "columns": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/OgrSourceColumnSpec" } - ], - "nullable": true + ] }, "dataType": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/VectorDataType" } - ], - "nullable": true + ] }, "defaultGeometry": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TypedGeometry" } - ], - "nullable": true + ] }, "fileName": { "type": "string" @@ -7245,8 +7291,10 @@ "$ref": "#/components/schemas/OgrSourceErrorSpec" }, "sqlQuery": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "time": { "$ref": "#/components/schemas/OgrSourceDatasetTimeType" @@ -7459,73 +7507,56 @@ "OgrSourceTimeFormat": { "oneOf": [ { - "$ref": "#/components/schemas/OgrSourceTimeFormatCustom" + "type": "object", + "required": [ + "customFormat", + "format" + ], + "properties": { + "customFormat": { + "$ref": "#/components/schemas/DateTimeParseFormat" + }, + "format": { + "type": "string", + "enum": [ + "custom" + ] + } + } }, { - "$ref": "#/components/schemas/OgrSourceTimeFormatUnixTimeStamp" + "type": "object", + "required": [ + "timestampType", + "format" + ], + "properties": { + "format": { + "type": "string", + "enum": [ + "unixTimeStamp" + ] + }, + "timestampType": { + "$ref": "#/components/schemas/UnixTimeStampType" + } + } }, { - "$ref": "#/components/schemas/OgrSourceTimeFormatAuto" - } - ], - "discriminator": { - "propertyName": "format", - "mapping": { - "auto": "#/components/schemas/OgrSourceTimeFormatAuto", - "custom": "#/components/schemas/OgrSourceTimeFormatCustom", - "unixTimeStamp": "#/components/schemas/OgrSourceTimeFormatUnixTimeStamp" - } - } - }, - "OgrSourceTimeFormatAuto": { - "type": "object", - "required": [ - "format" - ], - "properties": { - "format": { - "type": "string", - "enum": [ - "auto" - ] - } - } - }, - "OgrSourceTimeFormatCustom": { - "type": "object", - "required": [ - "customFormat", - "format" - ], - "properties": { - "customFormat": { - "$ref": "#/components/schemas/DateTimeParseFormat" - }, - 
"format": { - "type": "string", - "enum": [ - "custom" - ] - } - } - }, - "OgrSourceTimeFormatUnixTimeStamp": { - "type": "object", - "required": [ - "timestampType", - "format" - ], - "properties": { - "format": { - "type": "string", - "enum": [ - "unixTimeStamp" - ] - }, - "timestampType": { - "$ref": "#/components/schemas/UnixTimeStampType" + "type": "object", + "required": [ + "format" + ], + "properties": { + "format": { + "type": "string", + "enum": [ + "auto" + ] + } + } } - } + ] }, "OperatorQuota": { "type": "object", @@ -7560,6 +7591,10 @@ "description": "A map from value to color\n\nIt is assumed that is has at least one and at most 256 entries.", "additionalProperties": { "$ref": "#/components/schemas/RgbaColor" + }, + "propertyNames": { + "type": "number", + "format": "double" } }, "PaletteColorizer": { @@ -7677,26 +7712,6 @@ "ImagePng" ] }, - "PlotQueryRectangle": { - "type": "object", - "description": "A spatio-temporal rectangle with a specified resolution", - "required": [ - "spatialBounds", - "timeInterval", - "spatialResolution" - ], - "properties": { - "spatialBounds": { - "$ref": "#/components/schemas/BoundingBox2D" - }, - "spatialResolution": { - "$ref": "#/components/schemas/SpatialResolution" - }, - "timeInterval": { - "$ref": "#/components/schemas/TimeInterval" - } - } - }, "PlotResultDescriptor": { "type": "object", "description": "A `ResultDescriptor` for plot queries", @@ -7705,36 +7720,30 @@ ], "properties": { "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/BoundingBox2D" } - ], - "nullable": true + ] }, "spatialReference": { "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] } } }, - "PlotUpdate": { - "oneOf": [ - { - "$ref": "#/components/schemas/ProjectUpdateToken" - }, - { - "$ref": "#/components/schemas/Plot" - } - ] - }, "PointSymbology": { "type": "object", "required": [ @@ -7754,12 +7763,14 @@ "$ref": "#/components/schemas/StrokeParam" }, "text": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TextSymbology" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -7788,12 +7799,14 @@ "$ref": "#/components/schemas/StrokeParam" }, "text": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TextSymbology" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -7917,8 +7930,8 @@ "type": "object", "title": "ProjectResource", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8014,11 +8027,13 @@ "$ref": "#/components/schemas/DataId" }, "provenance": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "$ref": "#/components/schemas/Provenance" - }, - "nullable": true + } } } }, @@ -8081,6 +8096,26 @@ } } }, + "QueryRectangle": { + "type": "object", + "description": "A spatio-temporal rectangle with a specified resolution", + "required": [ + "spatialBounds", + "timeInterval", + "spatialResolution" + ], + "properties": { + "spatialBounds": { + "$ref": "#/components/schemas/SpatialPartition2D" + }, + "spatialResolution": { + "$ref": "#/components/schemas/SpatialResolution" + }, + "timeInterval": { + "$ref": "#/components/schemas/TimeInterval" + } + } + }, "Quota": { "type": "object", "required": [ @@ -8165,22 +8200,26 @@ "default": true }, "description": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "displayName": { "type": "string" }, "name": { - 
"allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/DatasetName" } - ], - "nullable": true + ] }, "query": { - "$ref": "#/components/schemas/RasterQueryRectangle" + "$ref": "#/components/schemas/QueryRectangle" } }, "example": { @@ -8239,34 +8278,16 @@ ], "properties": { "domain": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "key": { "type": "string" } } }, - "RasterQueryRectangle": { - "type": "object", - "description": "A spatio-temporal rectangle with a specified resolution", - "required": [ - "spatialBounds", - "timeInterval", - "spatialResolution" - ], - "properties": { - "spatialBounds": { - "$ref": "#/components/schemas/SpatialPartition2D" - }, - "spatialResolution": { - "$ref": "#/components/schemas/SpatialResolution" - }, - "timeInterval": { - "$ref": "#/components/schemas/TimeInterval" - } - } - }, "RasterResultDescriptor": { "type": "object", "description": "A `ResultDescriptor` for raster queries", @@ -8280,34 +8301,40 @@ "$ref": "#/components/schemas/RasterBandDescriptors" }, "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/SpatialPartition2D" } - ], - "nullable": true + ] }, "dataType": { "$ref": "#/components/schemas/RasterDataType" }, "resolution": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/SpatialResolution" } - ], - "nullable": true + ] }, "spatialReference": { "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] } } }, @@ -8402,8 +8429,8 @@ "ResourceIdDatasetId": { "type": "object", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8420,8 +8447,8 @@ "ResourceIdLayer": { "type": "object", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8438,8 +8465,8 @@ "ResourceIdLayerCollection": { "type": "object", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8456,8 +8483,8 @@ "ResourceIdMlModel": { "type": "object", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8474,8 +8501,8 @@ "ResourceIdProject": { "type": "object", "required": [ - "type", - "id" + "id", + "type" ], "properties": { "id": { @@ -8561,11 +8588,13 @@ "type": "boolean" }, "filters": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { "type": "string" - }, - "nullable": true + } }, "searchTypes": { "$ref": "#/components/schemas/SearchTypes" @@ -8683,20 +8712,24 @@ ], "properties": { "axisLabels": { - "allOf": [ + "oneOf": [ { - "$ref": "#/components/schemas/AxisLabels" + "type": "null" + }, + { + "$ref": "#/components/schemas/StringPair" } - ], - "nullable": true + ] }, "axisOrder": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/AxisOrder" } - ], - "nullable": true + ] }, "extent": { "$ref": "#/components/schemas/BoundingBox2D" @@ -8730,6 +8763,28 @@ } } }, + "StaticMetaData": { + "type": "object", + "required": [ + "loadingInfo", + "resultDescriptor", + "type" + ], + "properties": { + "loadingInfo": { + "$ref": "#/components/schemas/OgrSourceDataset" + }, + "resultDescriptor": { + "$ref": "#/components/schemas/VectorResultDescriptor" + }, + "type": { + "type": "string", + "enum": [ + "OgrMetaData" + ] + } + } + }, "StaticNumberParam": { "type": "object", "title": "StaticNumberParam", @@ -8750,6 +8805,14 @@ } } }, + "StringPair": { + "type": "array", + "items": { + "type": "string" + }, 
+ "maxItems": 2, + "minItems": 2 + }, "StrokeParam": { "type": "object", "required": [ @@ -8775,12 +8838,16 @@ "$ref": "#/components/schemas/DataPath" }, "layerName": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "mainFile": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } }, @@ -8834,12 +8901,14 @@ "type": "object", "properties": { "filter": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TaskFilter" } - ], - "nullable": true + ] }, "limit": { "type": "integer", @@ -9167,21 +9236,23 @@ ] } }, - "example": { - "type": "MockPointSource", - "params": { - "points": [ - { - "x": 0.0, - "y": 0.1 - }, - { - "x": 1.0, - "y": 1.1 - } - ] + "examples": [ + { + "type": "MockPointSource", + "params": { + "points": [ + { + "x": 0.0, + "y": 0.1 + }, + { + "x": 1.0, + "y": 1.1 + } + ] + } } - } + ] }, "TypedPlotResultDescriptor": { "type": "object", @@ -9192,23 +9263,27 @@ ], "properties": { "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/BoundingBox2D" } - ], - "nullable": true + ] }, "spatialReference": { "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -9232,34 +9307,40 @@ "$ref": "#/components/schemas/RasterBandDescriptors" }, "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/SpatialPartition2D" } - ], - "nullable": true + ] }, "dataType": { "$ref": "#/components/schemas/RasterDataType" }, "resolution": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/SpatialResolution" } - ], - "nullable": true + ] }, "spatialReference": { "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -9300,17 +9381,22 @@ ], "properties": { "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/BoundingBox2D" } - ], - "nullable": true + ] }, "columns": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/VectorColumnInfo" + }, + "propertyNames": { + "type": "string" } }, "dataType": { @@ -9320,12 +9406,14 @@ "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] }, "type": { "type": "string", @@ -9400,6 +9488,9 @@ "description": "metadata used for loading the data", "additionalProperties": { "type": "string" + }, + "propertyNames": { + "type": "string" } }, "name": { @@ -9414,12 +9505,14 @@ "description": "properties, for instance, to be rendered in the UI" }, "symbology": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/Symbology" } - ], - "nullable": true + ] }, "workflow": { "$ref": "#/components/schemas/Workflow" @@ -9456,45 +9549,57 @@ ], "properties": { "bounds": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/STRectangle" } - ], - "nullable": true + ] }, "description": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "id": { "$ref": "#/components/schemas/ProjectId" }, "layers": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { - "$ref": "#/components/schemas/LayerUpdate" - }, - "nullable": true + "$ref": 
"#/components/schemas/VecUpdate" + } }, "name": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "plots": { - "type": "array", + "type": [ + "array", + "null" + ], "items": { - "$ref": "#/components/schemas/PlotUpdate" - }, - "nullable": true + "$ref": "#/components/schemas/VecUpdate" + } }, "timeStep": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeStep" } - ], - "nullable": true + ] } }, "example": { @@ -9641,15 +9746,19 @@ ], "properties": { "email": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] }, "id": { "$ref": "#/components/schemas/UserId" }, "realName": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } }, @@ -9695,12 +9804,14 @@ "$ref": "#/components/schemas/SessionId" }, "project": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/ProjectId" } - ], - "nullable": true + ] }, "roles": { "type": "array", @@ -9716,15 +9827,27 @@ "format": "date-time" }, "view": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/STRectangle" } - ], - "nullable": true + ] } } }, + "VecUpdate": { + "oneOf": [ + { + "$ref": "#/components/schemas/ProjectUpdateToken" + }, + { + "$ref": "#/components/schemas/Plot" + } + ] + }, "VectorColumnInfo": { "type": "object", "required": [ @@ -9750,26 +9873,6 @@ "MultiPolygon" ] }, - "VectorQueryRectangle": { - "type": "object", - "description": "A spatio-temporal rectangle with a specified resolution", - "required": [ - "spatialBounds", - "timeInterval", - "spatialResolution" - ], - "properties": { - "spatialBounds": { - "$ref": "#/components/schemas/BoundingBox2D" - }, - "spatialResolution": { - "$ref": "#/components/schemas/SpatialResolution" - }, - "timeInterval": { - "$ref": "#/components/schemas/TimeInterval" - } - } - }, "VectorResultDescriptor": { "type": "object", "required": [ @@ -9779,17 +9882,22 @@ ], "properties": { "bbox": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/BoundingBox2D" } - ], - "nullable": true + ] }, "columns": { "type": "object", "additionalProperties": { "$ref": "#/components/schemas/VectorColumnInfo" + }, + "propertyNames": { + "type": "string" } }, "dataType": { @@ -9799,27 +9907,29 @@ "type": "string" }, "time": { - "allOf": [ + "oneOf": [ + { + "type": "null" + }, { "$ref": "#/components/schemas/TimeInterval" } - ], - "nullable": true + ] } } }, "Volume": { "type": "object", "required": [ - "name" + "name", + "path" ], "properties": { "name": { - "type": "string" + "$ref": "#/components/schemas/VolumeName" }, "path": { - "type": "string", - "nullable": true + "type": "string" } } }, @@ -9854,8 +9964,10 @@ } }, "spatial_reference": { - "type": "string", - "nullable": true + "type": [ + "string", + "null" + ] } } }, @@ -9903,9 +10015,6 @@ "allOf": [ { "$ref": "#/components/schemas/TypedOperator" - }, - { - "type": "object" } ] }, @@ -10049,8 +10158,12 @@ "content": { "image/png": { "schema": { - "type": "string", - "format": "binary" + "type": "array", + "items": { + "type": "integer", + "format": "int32", + "minimum": 0 + } }, "example": "image bytes" } @@ -10153,8 +10266,12 @@ "content": { "application/zip": { "schema": { - "type": "string", - "format": "binary" + "type": "array", + "items": { + "type": "integer", + "format": "int32", + "minimum": 0 + } }, "example": "zip bytes" } diff --git a/python/.github/workflows/python.yml b/python/.github/workflows/python.yml index 
a02d9803..1cd345df 100644 --- a/python/.github/workflows/python.yml +++ b/python/.github/workflows/python.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: @@ -24,15 +24,8 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - if [ -f test-requirements.txt ]; then pip install -r test-requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + pip install -r requirements.txt + pip install -r test-requirements.txt - name: Test with pytest run: | - pytest + pytest --cov={{packageName}} diff --git a/python/.gitlab-ci.yml b/python/.gitlab-ci.yml index 6a189db0..94769389 100644 --- a/python/.gitlab-ci.yml +++ b/python/.gitlab-ci.yml @@ -14,9 +14,6 @@ stages: - pip install -r test-requirements.txt - pytest --cov=geoengine_openapi_client -pytest-3.7: - extends: .pytest - image: python:3.7-alpine pytest-3.8: extends: .pytest image: python:3.8-alpine @@ -29,3 +26,6 @@ pytest-3.10: pytest-3.11: extends: .pytest image: python:3.11-alpine +pytest-3.12: + extends: .pytest + image: python:3.12-alpine diff --git a/python/.openapi-generator/FILES b/python/.openapi-generator/FILES index 553a46e3..3c517a78 100644 --- a/python/.openapi-generator/FILES +++ b/python/.openapi-generator/FILES @@ -3,7 +3,6 @@ .gitlab-ci.yml .travis.yml README.md -docs/AddCollection200Response.md docs/AddDataset.md docs/AddLayer.md docs/AddLayerCollection.md @@ -24,7 +23,6 @@ docs/ComputationQuota.md docs/ContinuousMeasurement.md docs/Coordinate2D.md docs/CreateDataset.md -docs/CreateDatasetHandler200Response.md docs/CreateProject.md docs/CsvHeader.md docs/DataId.md @@ -38,7 +36,6 @@ docs/DatasetDefinition.md docs/DatasetListing.md docs/DatasetResource.md docs/DatasetsApi.md -docs/DateTime.md docs/DerivedColor.md docs/DerivedNumber.md docs/DescribeCoverageRequest.md @@ -69,6 +66,9 @@ docs/GetLegendGraphicRequest.md docs/GetMapExceptionFormat.md docs/GetMapFormat.md docs/GetMapRequest.md +docs/InlineObject.md +docs/InlineObject1.md +docs/InlineObject2.md docs/InternalDataId.md docs/Layer.md docs/LayerCollection.md @@ -76,7 +76,6 @@ docs/LayerCollectionListing.md docs/LayerCollectionResource.md docs/LayerListing.md docs/LayerResource.md -docs/LayerUpdate.md docs/LayerVisibility.md docs/LayersApi.md docs/LineSymbology.md @@ -91,7 +90,6 @@ docs/MlModelMetadata.md docs/MlModelNameResponse.md docs/MlModelResource.md docs/MockDatasetDataSourceLoadingInfo.md -docs/MockMetaData.md docs/MultiBandRasterColorizer.md docs/MultiLineString.md docs/MultiPoint.md @@ -100,7 +98,6 @@ docs/NumberParam.md docs/OGCWCSApi.md docs/OGCWFSApi.md docs/OGCWMSApi.md -docs/OgrMetaData.md docs/OgrSourceColumnSpec.md docs/OgrSourceDataset.md docs/OgrSourceDatasetTimeType.md @@ -114,9 +111,9 @@ docs/OgrSourceDurationSpecValue.md docs/OgrSourceDurationSpecZero.md docs/OgrSourceErrorSpec.md docs/OgrSourceTimeFormat.md -docs/OgrSourceTimeFormatAuto.md 
-docs/OgrSourceTimeFormatCustom.md -docs/OgrSourceTimeFormatUnixTimeStamp.md +docs/OgrSourceTimeFormatOneOf.md +docs/OgrSourceTimeFormatOneOf1.md +docs/OgrSourceTimeFormatOneOf2.md docs/OperatorQuota.md docs/OrderBy.md docs/PaletteColorizer.md @@ -127,9 +124,7 @@ docs/PermissionRequest.md docs/PermissionsApi.md docs/Plot.md docs/PlotOutputFormat.md -docs/PlotQueryRectangle.md docs/PlotResultDescriptor.md -docs/PlotUpdate.md docs/PlotsApi.md docs/PointSymbology.md docs/PolygonSymbology.md @@ -147,6 +142,7 @@ docs/Provenances.md docs/ProviderCapabilities.md docs/ProviderLayerCollectionId.md docs/ProviderLayerId.md +docs/QueryRectangle.md docs/Quota.md docs/RasterBandDescriptor.md docs/RasterColorizer.md @@ -155,7 +151,6 @@ docs/RasterDatasetFromWorkflow.md docs/RasterDatasetFromWorkflowResult.md docs/RasterPropertiesEntryType.md docs/RasterPropertiesKey.md -docs/RasterQueryRectangle.md docs/RasterResultDescriptor.md docs/RasterStreamWebsocketResultType.md docs/RasterSymbology.md @@ -180,6 +175,7 @@ docs/SpatialReferenceAuthority.md docs/SpatialReferenceSpecification.md docs/SpatialReferencesApi.md docs/SpatialResolution.md +docs/StaticMetaData.md docs/StaticNumberParam.md docs/StrokeParam.md docs/SuggestMetaData.md @@ -227,9 +223,9 @@ docs/UserCredentials.md docs/UserInfo.md docs/UserRegistration.md docs/UserSession.md +docs/VecUpdate.md docs/VectorColumnInfo.md docs/VectorDataType.md -docs/VectorQueryRectangle.md docs/VectorResultDescriptor.md docs/Volume.md docs/VolumeFileLayersResponse.md @@ -266,7 +262,6 @@ geoengine_openapi_client/api_response.py geoengine_openapi_client/configuration.py geoengine_openapi_client/exceptions.py geoengine_openapi_client/models/__init__.py -geoengine_openapi_client/models/add_collection200_response.py geoengine_openapi_client/models/add_dataset.py geoengine_openapi_client/models/add_layer.py geoengine_openapi_client/models/add_layer_collection.py @@ -287,7 +282,6 @@ geoengine_openapi_client/models/computation_quota.py geoengine_openapi_client/models/continuous_measurement.py geoengine_openapi_client/models/coordinate2_d.py geoengine_openapi_client/models/create_dataset.py -geoengine_openapi_client/models/create_dataset_handler200_response.py geoengine_openapi_client/models/create_project.py geoengine_openapi_client/models/csv_header.py geoengine_openapi_client/models/data_id.py @@ -300,7 +294,6 @@ geoengine_openapi_client/models/dataset.py geoengine_openapi_client/models/dataset_definition.py geoengine_openapi_client/models/dataset_listing.py geoengine_openapi_client/models/dataset_resource.py -geoengine_openapi_client/models/date_time.py geoengine_openapi_client/models/derived_color.py geoengine_openapi_client/models/derived_number.py geoengine_openapi_client/models/describe_coverage_request.py @@ -330,6 +323,9 @@ geoengine_openapi_client/models/get_legend_graphic_request.py geoengine_openapi_client/models/get_map_exception_format.py geoengine_openapi_client/models/get_map_format.py geoengine_openapi_client/models/get_map_request.py +geoengine_openapi_client/models/inline_object.py +geoengine_openapi_client/models/inline_object1.py +geoengine_openapi_client/models/inline_object2.py geoengine_openapi_client/models/internal_data_id.py geoengine_openapi_client/models/layer.py geoengine_openapi_client/models/layer_collection.py @@ -337,7 +333,6 @@ geoengine_openapi_client/models/layer_collection_listing.py geoengine_openapi_client/models/layer_collection_resource.py geoengine_openapi_client/models/layer_listing.py 
geoengine_openapi_client/models/layer_resource.py -geoengine_openapi_client/models/layer_update.py geoengine_openapi_client/models/layer_visibility.py geoengine_openapi_client/models/line_symbology.py geoengine_openapi_client/models/linear_gradient.py @@ -350,13 +345,11 @@ geoengine_openapi_client/models/ml_model_metadata.py geoengine_openapi_client/models/ml_model_name_response.py geoengine_openapi_client/models/ml_model_resource.py geoengine_openapi_client/models/mock_dataset_data_source_loading_info.py -geoengine_openapi_client/models/mock_meta_data.py geoengine_openapi_client/models/multi_band_raster_colorizer.py geoengine_openapi_client/models/multi_line_string.py geoengine_openapi_client/models/multi_point.py geoengine_openapi_client/models/multi_polygon.py geoengine_openapi_client/models/number_param.py -geoengine_openapi_client/models/ogr_meta_data.py geoengine_openapi_client/models/ogr_source_column_spec.py geoengine_openapi_client/models/ogr_source_dataset.py geoengine_openapi_client/models/ogr_source_dataset_time_type.py @@ -370,9 +363,9 @@ geoengine_openapi_client/models/ogr_source_duration_spec_value.py geoengine_openapi_client/models/ogr_source_duration_spec_zero.py geoengine_openapi_client/models/ogr_source_error_spec.py geoengine_openapi_client/models/ogr_source_time_format.py -geoengine_openapi_client/models/ogr_source_time_format_auto.py -geoengine_openapi_client/models/ogr_source_time_format_custom.py -geoengine_openapi_client/models/ogr_source_time_format_unix_time_stamp.py +geoengine_openapi_client/models/ogr_source_time_format_one_of.py +geoengine_openapi_client/models/ogr_source_time_format_one_of1.py +geoengine_openapi_client/models/ogr_source_time_format_one_of2.py geoengine_openapi_client/models/operator_quota.py geoengine_openapi_client/models/order_by.py geoengine_openapi_client/models/palette_colorizer.py @@ -382,9 +375,7 @@ geoengine_openapi_client/models/permission_listing.py geoengine_openapi_client/models/permission_request.py geoengine_openapi_client/models/plot.py geoengine_openapi_client/models/plot_output_format.py -geoengine_openapi_client/models/plot_query_rectangle.py geoengine_openapi_client/models/plot_result_descriptor.py -geoengine_openapi_client/models/plot_update.py geoengine_openapi_client/models/point_symbology.py geoengine_openapi_client/models/polygon_symbology.py geoengine_openapi_client/models/project.py @@ -400,6 +391,7 @@ geoengine_openapi_client/models/provenances.py geoengine_openapi_client/models/provider_capabilities.py geoengine_openapi_client/models/provider_layer_collection_id.py geoengine_openapi_client/models/provider_layer_id.py +geoengine_openapi_client/models/query_rectangle.py geoengine_openapi_client/models/quota.py geoengine_openapi_client/models/raster_band_descriptor.py geoengine_openapi_client/models/raster_colorizer.py @@ -408,7 +400,6 @@ geoengine_openapi_client/models/raster_dataset_from_workflow.py geoengine_openapi_client/models/raster_dataset_from_workflow_result.py geoengine_openapi_client/models/raster_properties_entry_type.py geoengine_openapi_client/models/raster_properties_key.py -geoengine_openapi_client/models/raster_query_rectangle.py geoengine_openapi_client/models/raster_result_descriptor.py geoengine_openapi_client/models/raster_stream_websocket_result_type.py geoengine_openapi_client/models/raster_symbology.py @@ -431,6 +422,7 @@ geoengine_openapi_client/models/spatial_reference_authority.py geoengine_openapi_client/models/spatial_reference_specification.py 
geoengine_openapi_client/models/spatial_resolution.py geoengine_openapi_client/models/st_rectangle.py +geoengine_openapi_client/models/static_meta_data.py geoengine_openapi_client/models/static_number_param.py geoengine_openapi_client/models/stroke_param.py geoengine_openapi_client/models/suggest_meta_data.py @@ -475,9 +467,9 @@ geoengine_openapi_client/models/user_credentials.py geoengine_openapi_client/models/user_info.py geoengine_openapi_client/models/user_registration.py geoengine_openapi_client/models/user_session.py +geoengine_openapi_client/models/vec_update.py geoengine_openapi_client/models/vector_column_info.py geoengine_openapi_client/models/vector_data_type.py -geoengine_openapi_client/models/vector_query_rectangle.py geoengine_openapi_client/models/vector_result_descriptor.py geoengine_openapi_client/models/volume.py geoengine_openapi_client/models/volume_file_layers_response.py @@ -499,7 +491,6 @@ setup.cfg setup.py test-requirements.txt test/__init__.py -test/test_add_collection200_response.py test/test_add_dataset.py test/test_add_layer.py test/test_add_layer_collection.py @@ -520,7 +511,6 @@ test/test_computation_quota.py test/test_continuous_measurement.py test/test_coordinate2_d.py test/test_create_dataset.py -test/test_create_dataset_handler200_response.py test/test_create_project.py test/test_csv_header.py test/test_data_id.py @@ -534,7 +524,6 @@ test/test_dataset_definition.py test/test_dataset_listing.py test/test_dataset_resource.py test/test_datasets_api.py -test/test_date_time.py test/test_derived_color.py test/test_derived_number.py test/test_describe_coverage_request.py @@ -565,6 +554,9 @@ test/test_get_legend_graphic_request.py test/test_get_map_exception_format.py test/test_get_map_format.py test/test_get_map_request.py +test/test_inline_object.py +test/test_inline_object1.py +test/test_inline_object2.py test/test_internal_data_id.py test/test_layer.py test/test_layer_collection.py @@ -572,7 +564,6 @@ test/test_layer_collection_listing.py test/test_layer_collection_resource.py test/test_layer_listing.py test/test_layer_resource.py -test/test_layer_update.py test/test_layer_visibility.py test/test_layers_api.py test/test_line_symbology.py @@ -587,7 +578,6 @@ test/test_ml_model_metadata.py test/test_ml_model_name_response.py test/test_ml_model_resource.py test/test_mock_dataset_data_source_loading_info.py -test/test_mock_meta_data.py test/test_multi_band_raster_colorizer.py test/test_multi_line_string.py test/test_multi_point.py @@ -596,7 +586,6 @@ test/test_number_param.py test/test_ogcwcs_api.py test/test_ogcwfs_api.py test/test_ogcwms_api.py -test/test_ogr_meta_data.py test/test_ogr_source_column_spec.py test/test_ogr_source_dataset.py test/test_ogr_source_dataset_time_type.py @@ -610,9 +599,9 @@ test/test_ogr_source_duration_spec_value.py test/test_ogr_source_duration_spec_zero.py test/test_ogr_source_error_spec.py test/test_ogr_source_time_format.py -test/test_ogr_source_time_format_auto.py -test/test_ogr_source_time_format_custom.py -test/test_ogr_source_time_format_unix_time_stamp.py +test/test_ogr_source_time_format_one_of.py +test/test_ogr_source_time_format_one_of1.py +test/test_ogr_source_time_format_one_of2.py test/test_operator_quota.py test/test_order_by.py test/test_palette_colorizer.py @@ -623,9 +612,7 @@ test/test_permission_request.py test/test_permissions_api.py test/test_plot.py test/test_plot_output_format.py -test/test_plot_query_rectangle.py test/test_plot_result_descriptor.py -test/test_plot_update.py test/test_plots_api.py 
test/test_point_symbology.py test/test_polygon_symbology.py @@ -643,6 +630,7 @@ test/test_provenances.py test/test_provider_capabilities.py test/test_provider_layer_collection_id.py test/test_provider_layer_id.py +test/test_query_rectangle.py test/test_quota.py test/test_raster_band_descriptor.py test/test_raster_colorizer.py @@ -651,7 +639,6 @@ test/test_raster_dataset_from_workflow.py test/test_raster_dataset_from_workflow_result.py test/test_raster_properties_entry_type.py test/test_raster_properties_key.py -test/test_raster_query_rectangle.py test/test_raster_result_descriptor.py test/test_raster_stream_websocket_result_type.py test/test_raster_symbology.py @@ -676,6 +663,7 @@ test/test_spatial_reference_specification.py test/test_spatial_references_api.py test/test_spatial_resolution.py test/test_st_rectangle.py +test/test_static_meta_data.py test/test_static_number_param.py test/test_stroke_param.py test/test_suggest_meta_data.py @@ -723,9 +711,9 @@ test/test_user_credentials.py test/test_user_info.py test/test_user_registration.py test/test_user_session.py +test/test_vec_update.py test/test_vector_column_info.py test/test_vector_data_type.py -test/test_vector_query_rectangle.py test/test_vector_result_descriptor.py test/test_volume.py test/test_volume_file_layers_response.py diff --git a/python/.openapi-generator/VERSION b/python/.openapi-generator/VERSION index 73a86b19..b23eb275 100644 --- a/python/.openapi-generator/VERSION +++ b/python/.openapi-generator/VERSION @@ -1 +1 @@ -7.0.1 \ No newline at end of file +7.11.0 diff --git a/python/.travis.yml b/python/.travis.yml index df26d6f0..ba59ea96 100644 --- a/python/.travis.yml +++ b/python/.travis.yml @@ -1,13 +1,13 @@ # ref: https://docs.travis-ci.com/user/languages/python language: python python: - - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" + - "3.12" # uncomment the following if needed - #- "3.11-dev" # 3.11 development branch + #- "3.12-dev" # 3.12 development branch #- "nightly" # nightly build # command to install dependencies install: diff --git a/python/README.md b/python/README.md index ba40002a..b69d40fe 100644 --- a/python/README.md +++ b/python/README.md @@ -5,11 +5,12 @@ This Python package is automatically generated by the [OpenAPI Generator](https: - API version: 0.8.0 - Package version: 0.0.19 +- Generator version: 7.11.0 - Build package: org.openapitools.codegen.languages.PythonClientCodegen ## Requirements. -Python 3.7+ +Python 3.8+ ## Installation & Usage ### pip install @@ -50,15 +51,14 @@ Please follow the [installation procedure](#installation--usage) and then run th ```python -import time import geoengine_openapi_client from geoengine_openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to http://127.0.0.1:3030/api +# Defining the host is optional and defaults to https://geoengine.io/api # See configuration.py for a list of all supported configuration parameters. configuration = geoengine_openapi_client.Configuration( - host = "http://127.0.0.1:3030/api" + host = "https://geoengine.io/api" ) # The client must configure the authentication and authorization parameters @@ -79,7 +79,7 @@ with geoengine_openapi_client.ApiClient(configuration) as api_client: auto_create_dataset = geoengine_openapi_client.AutoCreateDataset() # AutoCreateDataset | try: - # Creates a new dataset using previously uploaded files. + # Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. 
api_response = api_instance.auto_create_dataset_handler(auto_create_dataset) print("The response of DatasetsApi->auto_create_dataset_handler:\n") pprint(api_response) @@ -90,19 +90,19 @@ with geoengine_openapi_client.ApiClient(configuration) as api_client: ## Documentation for API Endpoints -All URIs are relative to *http://127.0.0.1:3030/api* +All URIs are relative to *https://geoengine.io/api* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- -*DatasetsApi* | [**auto_create_dataset_handler**](docs/DatasetsApi.md#auto_create_dataset_handler) | **POST** /dataset/auto | Creates a new dataset using previously uploaded files. -*DatasetsApi* | [**create_dataset_handler**](docs/DatasetsApi.md#create_dataset_handler) | **POST** /dataset | Creates a new dataset referencing files. +*DatasetsApi* | [**auto_create_dataset_handler**](docs/DatasetsApi.md#auto_create_dataset_handler) | **POST** /dataset/auto | Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. +*DatasetsApi* | [**create_dataset_handler**](docs/DatasetsApi.md#create_dataset_handler) | **POST** /dataset | Creates a new dataset referencing files. Users can reference previously uploaded files. Admins can reference files from a volume. *DatasetsApi* | [**delete_dataset_handler**](docs/DatasetsApi.md#delete_dataset_handler) | **DELETE** /dataset/{dataset} | Delete a dataset *DatasetsApi* | [**get_dataset_handler**](docs/DatasetsApi.md#get_dataset_handler) | **GET** /dataset/{dataset} | Retrieves details about a dataset using the internal name. *DatasetsApi* | [**get_loading_info_handler**](docs/DatasetsApi.md#get_loading_info_handler) | **GET** /dataset/{dataset}/loadingInfo | Retrieves the loading information of a dataset *DatasetsApi* | [**list_datasets_handler**](docs/DatasetsApi.md#list_datasets_handler) | **GET** /datasets | Lists available datasets. *DatasetsApi* | [**list_volume_file_layers_handler**](docs/DatasetsApi.md#list_volume_file_layers_handler) | **GET** /dataset/volumes/{volume_name}/files/{file_name}/layers | List the layers of a file in a volume. *DatasetsApi* | [**list_volumes_handler**](docs/DatasetsApi.md#list_volumes_handler) | **GET** /dataset/volumes | Lists available volumes. -*DatasetsApi* | [**suggest_meta_data_handler**](docs/DatasetsApi.md#suggest_meta_data_handler) | **POST** /dataset/suggest | Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. +*DatasetsApi* | [**suggest_meta_data_handler**](docs/DatasetsApi.md#suggest_meta_data_handler) | **POST** /dataset/suggest | Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. Tries to automatically detect the main file and layer name if not specified. *DatasetsApi* | [**update_dataset_handler**](docs/DatasetsApi.md#update_dataset_handler) | **POST** /dataset/{dataset} | Update details about a dataset using the internal name. 
*DatasetsApi* | [**update_dataset_provenance_handler**](docs/DatasetsApi.md#update_dataset_provenance_handler) | **PUT** /dataset/{dataset}/provenance | *DatasetsApi* | [**update_dataset_symbology_handler**](docs/DatasetsApi.md#update_dataset_symbology_handler) | **PUT** /dataset/{dataset}/symbology | Updates the dataset's symbology @@ -148,12 +148,12 @@ Class | Method | HTTP request | Description *ProjectsApi* | [**load_project_latest_handler**](docs/ProjectsApi.md#load_project_latest_handler) | **GET** /project/{project} | Retrieves details about the latest version of a project. *ProjectsApi* | [**load_project_version_handler**](docs/ProjectsApi.md#load_project_version_handler) | **GET** /project/{project}/{version} | Retrieves details about the given version of a project. *ProjectsApi* | [**project_versions_handler**](docs/ProjectsApi.md#project_versions_handler) | **GET** /project/{project}/versions | Lists all available versions of a project. -*ProjectsApi* | [**update_project_handler**](docs/ProjectsApi.md#update_project_handler) | **PATCH** /project/{project} | Updates a project. +*ProjectsApi* | [**update_project_handler**](docs/ProjectsApi.md#update_project_handler) | **PATCH** /project/{project} | Updates a project. This will create a new version. *SessionApi* | [**anonymous_handler**](docs/SessionApi.md#anonymous_handler) | **POST** /anonymous | Creates session for anonymous user. The session's id serves as a Bearer token for requests. *SessionApi* | [**login_handler**](docs/SessionApi.md#login_handler) | **POST** /login | Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. *SessionApi* | [**logout_handler**](docs/SessionApi.md#logout_handler) | **POST** /logout | Ends a session. *SessionApi* | [**oidc_init**](docs/SessionApi.md#oidc_init) | **POST** /oidcInit | Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. -*SessionApi* | [**oidc_login**](docs/SessionApi.md#oidc_login) | **POST** /oidcLogin | Creates a session for a user via a login with Open Id Connect. +*SessionApi* | [**oidc_login**](docs/SessionApi.md#oidc_login) | **POST** /oidcLogin | Creates a session for a user via a login with Open Id Connect. This call must be preceded by a call to oidcInit and match the parameters of that call. *SessionApi* | [**register_user_handler**](docs/SessionApi.md#register_user_handler) | **POST** /user | Registers a user. *SessionApi* | [**session_handler**](docs/SessionApi.md#session_handler) | **GET** /session | Retrieves details about the current session. *SpatialReferencesApi* | [**get_spatial_reference_specification_handler**](docs/SpatialReferencesApi.md#get_spatial_reference_specification_handler) | **GET** /spatialReferenceSpecification/{srsString} | @@ -176,7 +176,7 @@ Class | Method | HTTP request | Description *UserApi* | [**remove_role_handler**](docs/UserApi.md#remove_role_handler) | **DELETE** /roles/{role} | Remove a role. Requires admin privilige. *UserApi* | [**revoke_role_handler**](docs/UserApi.md#revoke_role_handler) | **DELETE** /users/{user}/roles/{role} | Revoke a role from a user. Requires admin privilige. *UserApi* | [**update_user_quota_handler**](docs/UserApi.md#update_user_quota_handler) | **POST** /quotas/{user} | Update the available quota of a specific user. 
-*WorkflowsApi* | [**dataset_from_workflow_handler**](docs/WorkflowsApi.md#dataset_from_workflow_handler) | **POST** /datasetFromWorkflow/{id} | Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. +*WorkflowsApi* | [**dataset_from_workflow_handler**](docs/WorkflowsApi.md#dataset_from_workflow_handler) | **POST** /datasetFromWorkflow/{id} | Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. Returns the id of the created task *WorkflowsApi* | [**get_workflow_all_metadata_zip_handler**](docs/WorkflowsApi.md#get_workflow_all_metadata_zip_handler) | **GET** /workflow/{id}/allMetadata/zip | Gets a ZIP archive of the worklow, its provenance and the output metadata. *WorkflowsApi* | [**get_workflow_metadata_handler**](docs/WorkflowsApi.md#get_workflow_metadata_handler) | **GET** /workflow/{id}/metadata | Gets the metadata of a workflow *WorkflowsApi* | [**get_workflow_provenance_handler**](docs/WorkflowsApi.md#get_workflow_provenance_handler) | **GET** /workflow/{id}/provenance | Gets the provenance of all datasets used in a workflow. @@ -187,7 +187,6 @@ Class | Method | HTTP request | Description ## Documentation For Models - - [AddCollection200Response](docs/AddCollection200Response.md) - [AddDataset](docs/AddDataset.md) - [AddLayer](docs/AddLayer.md) - [AddLayerCollection](docs/AddLayerCollection.md) @@ -208,7 +207,6 @@ Class | Method | HTTP request | Description - [ContinuousMeasurement](docs/ContinuousMeasurement.md) - [Coordinate2D](docs/Coordinate2D.md) - [CreateDataset](docs/CreateDataset.md) - - [CreateDatasetHandler200Response](docs/CreateDatasetHandler200Response.md) - [CreateProject](docs/CreateProject.md) - [CsvHeader](docs/CsvHeader.md) - [DataId](docs/DataId.md) @@ -221,7 +219,6 @@ Class | Method | HTTP request | Description - [DatasetDefinition](docs/DatasetDefinition.md) - [DatasetListing](docs/DatasetListing.md) - [DatasetResource](docs/DatasetResource.md) - - [DateTime](docs/DateTime.md) - [DerivedColor](docs/DerivedColor.md) - [DerivedNumber](docs/DerivedNumber.md) - [DescribeCoverageRequest](docs/DescribeCoverageRequest.md) @@ -251,6 +248,9 @@ Class | Method | HTTP request | Description - [GetMapExceptionFormat](docs/GetMapExceptionFormat.md) - [GetMapFormat](docs/GetMapFormat.md) - [GetMapRequest](docs/GetMapRequest.md) + - [InlineObject](docs/InlineObject.md) + - [InlineObject1](docs/InlineObject1.md) + - [InlineObject2](docs/InlineObject2.md) - [InternalDataId](docs/InternalDataId.md) - [Layer](docs/Layer.md) - [LayerCollection](docs/LayerCollection.md) @@ -258,7 +258,6 @@ Class | Method | HTTP request | Description - [LayerCollectionResource](docs/LayerCollectionResource.md) - [LayerListing](docs/LayerListing.md) - [LayerResource](docs/LayerResource.md) - - [LayerUpdate](docs/LayerUpdate.md) - [LayerVisibility](docs/LayerVisibility.md) - [LineSymbology](docs/LineSymbology.md) - [LinearGradient](docs/LinearGradient.md) @@ -271,13 +270,11 @@ Class | Method | HTTP request | Description - [MlModelNameResponse](docs/MlModelNameResponse.md) - [MlModelResource](docs/MlModelResource.md) - [MockDatasetDataSourceLoadingInfo](docs/MockDatasetDataSourceLoadingInfo.md) - - [MockMetaData](docs/MockMetaData.md) - [MultiBandRasterColorizer](docs/MultiBandRasterColorizer.md) - [MultiLineString](docs/MultiLineString.md) - [MultiPoint](docs/MultiPoint.md) - [MultiPolygon](docs/MultiPolygon.md) - 
[NumberParam](docs/NumberParam.md) - - [OgrMetaData](docs/OgrMetaData.md) - [OgrSourceColumnSpec](docs/OgrSourceColumnSpec.md) - [OgrSourceDataset](docs/OgrSourceDataset.md) - [OgrSourceDatasetTimeType](docs/OgrSourceDatasetTimeType.md) @@ -291,9 +288,9 @@ Class | Method | HTTP request | Description - [OgrSourceDurationSpecZero](docs/OgrSourceDurationSpecZero.md) - [OgrSourceErrorSpec](docs/OgrSourceErrorSpec.md) - [OgrSourceTimeFormat](docs/OgrSourceTimeFormat.md) - - [OgrSourceTimeFormatAuto](docs/OgrSourceTimeFormatAuto.md) - - [OgrSourceTimeFormatCustom](docs/OgrSourceTimeFormatCustom.md) - - [OgrSourceTimeFormatUnixTimeStamp](docs/OgrSourceTimeFormatUnixTimeStamp.md) + - [OgrSourceTimeFormatOneOf](docs/OgrSourceTimeFormatOneOf.md) + - [OgrSourceTimeFormatOneOf1](docs/OgrSourceTimeFormatOneOf1.md) + - [OgrSourceTimeFormatOneOf2](docs/OgrSourceTimeFormatOneOf2.md) - [OperatorQuota](docs/OperatorQuota.md) - [OrderBy](docs/OrderBy.md) - [PaletteColorizer](docs/PaletteColorizer.md) @@ -303,9 +300,7 @@ Class | Method | HTTP request | Description - [PermissionRequest](docs/PermissionRequest.md) - [Plot](docs/Plot.md) - [PlotOutputFormat](docs/PlotOutputFormat.md) - - [PlotQueryRectangle](docs/PlotQueryRectangle.md) - [PlotResultDescriptor](docs/PlotResultDescriptor.md) - - [PlotUpdate](docs/PlotUpdate.md) - [PointSymbology](docs/PointSymbology.md) - [PolygonSymbology](docs/PolygonSymbology.md) - [Project](docs/Project.md) @@ -321,6 +316,7 @@ Class | Method | HTTP request | Description - [ProviderCapabilities](docs/ProviderCapabilities.md) - [ProviderLayerCollectionId](docs/ProviderLayerCollectionId.md) - [ProviderLayerId](docs/ProviderLayerId.md) + - [QueryRectangle](docs/QueryRectangle.md) - [Quota](docs/Quota.md) - [RasterBandDescriptor](docs/RasterBandDescriptor.md) - [RasterColorizer](docs/RasterColorizer.md) @@ -329,7 +325,6 @@ Class | Method | HTTP request | Description - [RasterDatasetFromWorkflowResult](docs/RasterDatasetFromWorkflowResult.md) - [RasterPropertiesEntryType](docs/RasterPropertiesEntryType.md) - [RasterPropertiesKey](docs/RasterPropertiesKey.md) - - [RasterQueryRectangle](docs/RasterQueryRectangle.md) - [RasterResultDescriptor](docs/RasterResultDescriptor.md) - [RasterStreamWebsocketResultType](docs/RasterStreamWebsocketResultType.md) - [RasterSymbology](docs/RasterSymbology.md) @@ -352,6 +347,7 @@ Class | Method | HTTP request | Description - [SpatialReferenceAuthority](docs/SpatialReferenceAuthority.md) - [SpatialReferenceSpecification](docs/SpatialReferenceSpecification.md) - [SpatialResolution](docs/SpatialResolution.md) + - [StaticMetaData](docs/StaticMetaData.md) - [StaticNumberParam](docs/StaticNumberParam.md) - [StrokeParam](docs/StrokeParam.md) - [SuggestMetaData](docs/SuggestMetaData.md) @@ -396,9 +392,9 @@ Class | Method | HTTP request | Description - [UserInfo](docs/UserInfo.md) - [UserRegistration](docs/UserRegistration.md) - [UserSession](docs/UserSession.md) + - [VecUpdate](docs/VecUpdate.md) - [VectorColumnInfo](docs/VectorColumnInfo.md) - [VectorDataType](docs/VectorDataType.md) - - [VectorQueryRectangle](docs/VectorQueryRectangle.md) - [VectorResultDescriptor](docs/VectorResultDescriptor.md) - [Volume](docs/Volume.md) - [VolumeFileLayersResponse](docs/VolumeFileLayersResponse.md) diff --git a/python/geoengine_openapi_client/__init__.py b/python/geoengine_openapi_client/__init__.py index 5ae8a2ec..c377c26f 100644 --- a/python/geoengine_openapi_client/__init__.py +++ b/python/geoengine_openapi_client/__init__.py @@ -47,7 +47,6 @@ from 
geoengine_openapi_client.exceptions import ApiException # import models into sdk package -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response from geoengine_openapi_client.models.add_dataset import AddDataset from geoengine_openapi_client.models.add_layer import AddLayer from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection @@ -68,7 +67,6 @@ from geoengine_openapi_client.models.continuous_measurement import ContinuousMeasurement from geoengine_openapi_client.models.coordinate2_d import Coordinate2D from geoengine_openapi_client.models.create_dataset import CreateDataset -from geoengine_openapi_client.models.create_dataset_handler200_response import CreateDatasetHandler200Response from geoengine_openapi_client.models.create_project import CreateProject from geoengine_openapi_client.models.csv_header import CsvHeader from geoengine_openapi_client.models.data_id import DataId @@ -81,7 +79,6 @@ from geoengine_openapi_client.models.dataset_definition import DatasetDefinition from geoengine_openapi_client.models.dataset_listing import DatasetListing from geoengine_openapi_client.models.dataset_resource import DatasetResource -from geoengine_openapi_client.models.date_time import DateTime from geoengine_openapi_client.models.derived_color import DerivedColor from geoengine_openapi_client.models.derived_number import DerivedNumber from geoengine_openapi_client.models.describe_coverage_request import DescribeCoverageRequest @@ -111,6 +108,9 @@ from geoengine_openapi_client.models.get_map_exception_format import GetMapExceptionFormat from geoengine_openapi_client.models.get_map_format import GetMapFormat from geoengine_openapi_client.models.get_map_request import GetMapRequest +from geoengine_openapi_client.models.inline_object import InlineObject +from geoengine_openapi_client.models.inline_object1 import InlineObject1 +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.internal_data_id import InternalDataId from geoengine_openapi_client.models.layer import Layer from geoengine_openapi_client.models.layer_collection import LayerCollection @@ -118,7 +118,6 @@ from geoengine_openapi_client.models.layer_collection_resource import LayerCollectionResource from geoengine_openapi_client.models.layer_listing import LayerListing from geoengine_openapi_client.models.layer_resource import LayerResource -from geoengine_openapi_client.models.layer_update import LayerUpdate from geoengine_openapi_client.models.layer_visibility import LayerVisibility from geoengine_openapi_client.models.line_symbology import LineSymbology from geoengine_openapi_client.models.linear_gradient import LinearGradient @@ -131,13 +130,11 @@ from geoengine_openapi_client.models.ml_model_name_response import MlModelNameResponse from geoengine_openapi_client.models.ml_model_resource import MlModelResource from geoengine_openapi_client.models.mock_dataset_data_source_loading_info import MockDatasetDataSourceLoadingInfo -from geoengine_openapi_client.models.mock_meta_data import MockMetaData from geoengine_openapi_client.models.multi_band_raster_colorizer import MultiBandRasterColorizer from geoengine_openapi_client.models.multi_line_string import MultiLineString from geoengine_openapi_client.models.multi_point import MultiPoint from geoengine_openapi_client.models.multi_polygon import MultiPolygon from geoengine_openapi_client.models.number_param import NumberParam -from 
geoengine_openapi_client.models.ogr_meta_data import OgrMetaData from geoengine_openapi_client.models.ogr_source_column_spec import OgrSourceColumnSpec from geoengine_openapi_client.models.ogr_source_dataset import OgrSourceDataset from geoengine_openapi_client.models.ogr_source_dataset_time_type import OgrSourceDatasetTimeType @@ -151,9 +148,9 @@ from geoengine_openapi_client.models.ogr_source_duration_spec_zero import OgrSourceDurationSpecZero from geoengine_openapi_client.models.ogr_source_error_spec import OgrSourceErrorSpec from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat -from geoengine_openapi_client.models.ogr_source_time_format_auto import OgrSourceTimeFormatAuto -from geoengine_openapi_client.models.ogr_source_time_format_custom import OgrSourceTimeFormatCustom -from geoengine_openapi_client.models.ogr_source_time_format_unix_time_stamp import OgrSourceTimeFormatUnixTimeStamp +from geoengine_openapi_client.models.ogr_source_time_format_one_of import OgrSourceTimeFormatOneOf +from geoengine_openapi_client.models.ogr_source_time_format_one_of1 import OgrSourceTimeFormatOneOf1 +from geoengine_openapi_client.models.ogr_source_time_format_one_of2 import OgrSourceTimeFormatOneOf2 from geoengine_openapi_client.models.operator_quota import OperatorQuota from geoengine_openapi_client.models.order_by import OrderBy from geoengine_openapi_client.models.palette_colorizer import PaletteColorizer @@ -163,9 +160,7 @@ from geoengine_openapi_client.models.permission_request import PermissionRequest from geoengine_openapi_client.models.plot import Plot from geoengine_openapi_client.models.plot_output_format import PlotOutputFormat -from geoengine_openapi_client.models.plot_query_rectangle import PlotQueryRectangle from geoengine_openapi_client.models.plot_result_descriptor import PlotResultDescriptor -from geoengine_openapi_client.models.plot_update import PlotUpdate from geoengine_openapi_client.models.point_symbology import PointSymbology from geoengine_openapi_client.models.polygon_symbology import PolygonSymbology from geoengine_openapi_client.models.project import Project @@ -181,6 +176,7 @@ from geoengine_openapi_client.models.provider_capabilities import ProviderCapabilities from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId +from geoengine_openapi_client.models.query_rectangle import QueryRectangle from geoengine_openapi_client.models.quota import Quota from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor from geoengine_openapi_client.models.raster_colorizer import RasterColorizer @@ -189,7 +185,6 @@ from geoengine_openapi_client.models.raster_dataset_from_workflow_result import RasterDatasetFromWorkflowResult from geoengine_openapi_client.models.raster_properties_entry_type import RasterPropertiesEntryType from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey -from geoengine_openapi_client.models.raster_query_rectangle import RasterQueryRectangle from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor from geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType from geoengine_openapi_client.models.raster_symbology import RasterSymbology @@ -212,6 +207,7 @@ from geoengine_openapi_client.models.spatial_reference_authority import SpatialReferenceAuthority from 
geoengine_openapi_client.models.spatial_reference_specification import SpatialReferenceSpecification from geoengine_openapi_client.models.spatial_resolution import SpatialResolution +from geoengine_openapi_client.models.static_meta_data import StaticMetaData from geoengine_openapi_client.models.static_number_param import StaticNumberParam from geoengine_openapi_client.models.stroke_param import StrokeParam from geoengine_openapi_client.models.suggest_meta_data import SuggestMetaData @@ -256,9 +252,9 @@ from geoengine_openapi_client.models.user_info import UserInfo from geoengine_openapi_client.models.user_registration import UserRegistration from geoengine_openapi_client.models.user_session import UserSession +from geoengine_openapi_client.models.vec_update import VecUpdate from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo from geoengine_openapi_client.models.vector_data_type import VectorDataType -from geoengine_openapi_client.models.vector_query_rectangle import VectorQueryRectangle from geoengine_openapi_client.models.vector_result_descriptor import VectorResultDescriptor from geoengine_openapi_client.models.volume import Volume from geoengine_openapi_client.models.volume_file_layers_response import VolumeFileLayersResponse diff --git a/python/geoengine_openapi_client/api/datasets_api.py b/python/geoengine_openapi_client/api/datasets_api.py index 5c30aedd..6f49a119 100644 --- a/python/geoengine_openapi_client/api/datasets_api.py +++ b/python/geoengine_openapi_client/api/datasets_api.py @@ -12,23 +12,19 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint, conlist +from pydantic import Field, StrictStr from typing import List, Optional - +from typing_extensions import Annotated from geoengine_openapi_client.models.auto_create_dataset import AutoCreateDataset from geoengine_openapi_client.models.create_dataset import CreateDataset -from geoengine_openapi_client.models.create_dataset_handler200_response import CreateDatasetHandler200Response from geoengine_openapi_client.models.dataset import Dataset from geoengine_openapi_client.models.dataset_listing import DatasetListing +from geoengine_openapi_client.models.inline_object1 import InlineObject1 from geoengine_openapi_client.models.meta_data_definition import MetaDataDefinition from geoengine_openapi_client.models.meta_data_suggestion import MetaDataSuggestion from geoengine_openapi_client.models.order_by import OrderBy @@ -39,12 +35,9 @@ from geoengine_openapi_client.models.volume import Volume from geoengine_openapi_client.models.volume_file_layers_response import VolumeFileLayersResponse -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class DatasetsApi: @@ -59,764 +52,1370 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def auto_create_dataset_handler(self, auto_create_dataset : 
AutoCreateDataset, **kwargs) -> CreateDatasetHandler200Response: # noqa: E501 - """Creates a new dataset using previously uploaded files. # noqa: E501 - The format of the files will be automatically detected when possible. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def auto_create_dataset_handler( + self, + auto_create_dataset: AutoCreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject1: + """Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. - >>> thread = api.auto_create_dataset_handler(auto_create_dataset, async_req=True) - >>> result = thread.get() :param auto_create_dataset: (required) :type auto_create_dataset: AutoCreateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: CreateDatasetHandler200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the auto_create_dataset_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.auto_create_dataset_handler_with_http_info(auto_create_dataset, **kwargs) # noqa: E501 - - @validate_arguments - def auto_create_dataset_handler_with_http_info(self, auto_create_dataset : AutoCreateDataset, **kwargs) -> ApiResponse: # noqa: E501 - """Creates a new dataset using previously uploaded files. # noqa: E501 - - The format of the files will be automatically detected when possible. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.auto_create_dataset_handler_with_http_info(auto_create_dataset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._auto_create_dataset_handler_serialize( + auto_create_dataset=auto_create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + '400': "ErrorResponse", + '401': "ErrorResponse", + '413': "ErrorResponse", + '415': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def auto_create_dataset_handler_with_http_info( + self, + auto_create_dataset: AutoCreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject1]: + """Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. + :param auto_create_dataset: (required) :type auto_create_dataset: AutoCreateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(CreateDatasetHandler200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._auto_create_dataset_handler_serialize( + auto_create_dataset=auto_create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + '400': "ErrorResponse", + '401': "ErrorResponse", + '413': "ErrorResponse", + '415': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'auto_create_dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def auto_create_dataset_handler_without_preload_content( + self, + auto_create_dataset: AutoCreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. + + + :param auto_create_dataset: (required) + :type auto_create_dataset: AutoCreateDataset + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._auto_create_dataset_handler_serialize( + auto_create_dataset=auto_create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method auto_create_dataset_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + '400': "ErrorResponse", + '401': "ErrorResponse", + '413': "ErrorResponse", + '415': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _auto_create_dataset_handler_serialize( + self, + auto_create_dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['auto_create_dataset'] is not None: - _body_params = _params['auto_create_dataset'] + if auto_create_dataset is not None: + _body_params = auto_create_dataset + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "CreateDatasetHandler200Response", - '400': "ErrorResponse", - '401': "ErrorResponse", - '413': "ErrorResponse", - '415': "ErrorResponse", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/auto', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/dataset/auto', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - 
_preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def create_dataset_handler(self, create_dataset : CreateDataset, **kwargs) -> CreateDatasetHandler200Response: # noqa: E501 - """Creates a new dataset referencing files. # noqa: E501 - Users can reference previously uploaded files. Admins can reference files from a volume. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_dataset_handler(create_dataset, async_req=True) - >>> result = thread.get() + @validate_call + def create_dataset_handler( + self, + create_dataset: CreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject1: + """Creates a new dataset referencing files. Users can reference previously uploaded files. Admins can reference files from a volume. + :param create_dataset: (required) :type create_dataset: CreateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: CreateDatasetHandler200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the create_dataset_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.create_dataset_handler_with_http_info(create_dataset, **kwargs) # noqa: E501 - - @validate_arguments - def create_dataset_handler_with_http_info(self, create_dataset : CreateDataset, **kwargs) -> ApiResponse: # noqa: E501 - """Creates a new dataset referencing files. # noqa: E501 - - Users can reference previously uploaded files. Admins can reference files from a volume. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_dataset_handler_with_http_info(create_dataset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._create_dataset_handler_serialize( + create_dataset=create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_dataset_handler_with_http_info( + self, + create_dataset: CreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject1]: + """Creates a new dataset referencing files. Users can reference previously uploaded files. Admins can reference files from a volume. + :param create_dataset: (required) :type create_dataset: CreateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(CreateDatasetHandler200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._create_dataset_handler_serialize( + create_dataset=create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'create_dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def create_dataset_handler_without_preload_content( + self, + create_dataset: CreateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Creates a new dataset referencing files. Users can reference previously uploaded files. Admins can reference files from a volume. + + + :param create_dataset: (required) + :type create_dataset: CreateDataset + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_dataset_handler_serialize( + create_dataset=create_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method create_dataset_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject1", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _create_dataset_handler_serialize( + self, + create_dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['create_dataset'] is not None: - _body_params = _params['create_dataset'] + if create_dataset is not None: + _body_params = create_dataset + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "CreateDatasetHandler200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/dataset', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments 
- def delete_dataset_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset id")], **kwargs) -> None: # noqa: E501 - """Delete a dataset # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_dataset_handler(dataset, async_req=True) - >>> result = thread.get() + @validate_call + def delete_dataset_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a dataset + :param dataset: Dataset id (required) :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the delete_dataset_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.delete_dataset_handler_with_http_info(dataset, **kwargs) # noqa: E501 - - @validate_arguments - def delete_dataset_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset id")], **kwargs) -> ApiResponse: # noqa: E501 - """Delete a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_dataset_handler_with_http_info(dataset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._delete_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_dataset_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete a dataset + :param dataset: Dataset id (required) :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._delete_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def delete_dataset_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a dataset + + + :param dataset: Dataset id (required) + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_dataset_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + def _delete_dataset_handler_serialize( + self, + dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}', 'DELETE', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='DELETE', + resource_path='/dataset/{dataset}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def get_dataset_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], **kwargs) -> Dataset: # noqa: E501 - """Retrieves details about a dataset using the internal name. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_dataset_handler(dataset, async_req=True) - >>> result = thread.get() - :param dataset: Dataset Name (required) - :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Dataset - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_dataset_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_dataset_handler_with_http_info(dataset, **kwargs) # noqa: E501 - - @validate_arguments - def get_dataset_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves details about a dataset using the internal name. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_dataset_handler_with_http_info(dataset, async_req=True) - >>> result = thread.get() + @validate_call + def get_dataset_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dataset: + """Retrieves details about a dataset using the internal name. + :param dataset: Dataset Name (required) :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(Dataset, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._get_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_dataset_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] - + _response_types_map: Dict[str, Optional[str]] = { + '200': "Dataset", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Dataset]: + """Retrieves details about a dataset using the internal name. - # process the query parameters - _query_params = [] - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + :param dataset: Dataset Name (required) + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _response_types_map = { + _response_types_map: Dict[str, Optional[str]] = { '200': "Dataset", '400': "ErrorResponse", '401': "ErrorResponse", } - - return self.api_client.call_api( - '/dataset/{dataset}', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def get_loading_info_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], **kwargs) -> MetaDataDefinition: # noqa: E501 - """Retrieves the loading information of a dataset # noqa: E501 + ) - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_loading_info_handler(dataset, async_req=True) - >>> result = thread.get() + @validate_call + def get_dataset_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves details about a dataset using the internal name. - :param dataset: Dataset Name (required) - :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: MetaDataDefinition - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the get_loading_info_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_loading_info_handler_with_http_info(dataset, **kwargs) # noqa: E501 - - @validate_arguments - def get_loading_info_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the loading information of a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_loading_info_handler_with_http_info(dataset, async_req=True) - >>> result = thread.get() :param dataset: Dataset Name (required) :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(MetaDataDefinition, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 + + _param = self._get_dataset_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Dataset", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_loading_info_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _get_dataset_handler_serialize( + self, + dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "MetaDataDefinition", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}/loadingInfo', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/dataset/{dataset}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def list_datasets_handler(self, order : OrderBy, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), filter : Optional[StrictStr] = None, tags : 
Optional[conlist(StrictStr)] = None, **kwargs) -> List[DatasetListing]: # noqa: E501 - """Lists available datasets. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_datasets_handler(order, offset, limit, filter, tags, async_req=True) - >>> result = thread.get() - :param order: (required) - :type order: OrderBy - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param filter: - :type filter: str - :param tags: - :type tags: List[str] - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + @validate_call + def get_loading_info_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MetaDataDefinition: + """Retrieves the loading information of a dataset + + + :param dataset: Dataset Name (required) + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[DatasetListing] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_datasets_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_datasets_handler_with_http_info(order, offset, limit, filter, tags, **kwargs) # noqa: E501 - - @validate_arguments - def list_datasets_handler_with_http_info(self, order : OrderBy, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), filter : Optional[StrictStr] = None, tags : Optional[conlist(StrictStr)] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Lists available datasets. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_datasets_handler_with_http_info(order, offset, limit, filter, tags, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_loading_info_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataDefinition", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_loading_info_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MetaDataDefinition]: + """Retrieves the loading information of a dataset + + + :param dataset: Dataset Name (required) + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_loading_info_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataDefinition", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_loading_info_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the loading information of a dataset + + + :param dataset: Dataset Name (required) + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_loading_info_handler_serialize( + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataDefinition", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_loading_info_handler_serialize( + self, + dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/dataset/{dataset}/loadingInfo', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def list_datasets_handler( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + filter: Optional[StrictStr] = None, + tags: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[DatasetListing]: + """Lists available datasets. + :param order: (required) :type order: OrderBy @@ -828,1148 +1427,2266 @@ def list_datasets_handler_with_http_info(self, order : OrderBy, offset : conint( :type filter: str :param tags: :type tags: List[str] - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(List[DatasetListing], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'order', - 'offset', - 'limit', - 'filter', - 'tags' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._list_datasets_handler_serialize( + order=order, + offset=offset, + limit=limit, + filter=filter, + tags=tags, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_datasets_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DatasetListing]", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_datasets_handler_with_http_info( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + filter: Optional[StrictStr] = None, + tags: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[DatasetListing]]: + """Lists available datasets. - _collection_formats = {} - # process the path parameters - _path_params = {} + :param order: (required) + :type order: OrderBy + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param filter: + :type filter: str + :param tags: + :type tags: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_datasets_handler_serialize( + order=order, + offset=offset, + limit=limit, + filter=filter, + tags=tags, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - # process the query parameters - _query_params = [] - if _params.get('filter') is not None: # noqa: E501 - _query_params.append(('filter', _params['filter'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DatasetListing]", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_datasets_handler_without_preload_content( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + filter: Optional[StrictStr] = None, + tags: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Lists available datasets. - if _params.get('order') is not None: # noqa: E501 - _query_params.append(('order', _params['order'].value)) - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) + :param order: (required) + :type order: OrderBy + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param filter: + :type filter: str + :param tags: + :type tags: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_datasets_handler_serialize( + order=order, + offset=offset, + limit=limit, + filter=filter, + tags=tags, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DatasetListing]", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_datasets_handler_serialize( + self, + order, + offset, + limit, + filter, + tags, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'tags': 'multi', + } - if _params.get('tags') is not None: # noqa: E501 - _query_params.append(('tags', _params['tags'])) - _collection_formats['tags'] = 'multi' + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + # process the query parameters + if filter is not None: + + _query_params.append(('filter', filter)) + + if order is not None: + + _query_params.append(('order', order.value)) + + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if tags is not None: + + _query_params.append(('tags', tags)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[DatasetListing]", - '400': "ErrorResponse", - '401': "ErrorResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/datasets', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/datasets', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + 
_request_auth=_request_auth + ) - @validate_arguments - def list_volume_file_layers_handler(self, volume_name : Annotated[StrictStr, Field(..., description="Volume name")], file_name : Annotated[StrictStr, Field(..., description="File name")], **kwargs) -> VolumeFileLayersResponse: # noqa: E501 - """List the layers of a file in a volume. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_volume_file_layers_handler(volume_name, file_name, async_req=True) - >>> result = thread.get() + + @validate_call + def list_volume_file_layers_handler( + self, + volume_name: Annotated[StrictStr, Field(description="Volume name")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> VolumeFileLayersResponse: + """List the layers of a file in a volume. + :param volume_name: Volume name (required) :type volume_name: str :param file_name: File name (required) :type file_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: VolumeFileLayersResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_volume_file_layers_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_volume_file_layers_handler_with_http_info(volume_name, file_name, **kwargs) # noqa: E501 - - @validate_arguments - def list_volume_file_layers_handler_with_http_info(self, volume_name : Annotated[StrictStr, Field(..., description="Volume name")], file_name : Annotated[StrictStr, Field(..., description="File name")], **kwargs) -> ApiResponse: # noqa: E501 - """List the layers of a file in a volume. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
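# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated patch): calling the regenerated
# list_datasets_handler, which now returns List[DatasetListing] directly and
# no longer accepts async_req/_preload_content kwargs.  The host URL, session
# token, and the OrderBy member picked below are illustrative assumptions.
from geoengine_openapi_client import ApiClient, Configuration
from geoengine_openapi_client.api.datasets_api import DatasetsApi
from geoengine_openapi_client.models.order_by import OrderBy

configuration = Configuration(host="http://localhost:3030/api")
configuration.access_token = "<session token>"  # satisfies the 'session_token' auth setting

with ApiClient(configuration) as api_client:
    datasets_api = DatasetsApi(api_client)
    listings = datasets_api.list_datasets_handler(
        order=list(OrderBy)[0],   # any OrderBy member; the concrete names come from the spec
        offset=0,
        limit=20,
        filter="ndvi",            # optional substring filter
        tags=["raster"],          # optional; sent with the 'multi' collection format
    )
    for listing in listings:      # each item is a DatasetListing
        print(listing.name)
# ---------------------------------------------------------------------------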
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_volume_file_layers_handler_with_http_info(volume_name, file_name, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._list_volume_file_layers_handler_serialize( + volume_name=volume_name, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "VolumeFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_volume_file_layers_handler_with_http_info( + self, + volume_name: Annotated[StrictStr, Field(description="Volume name")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[VolumeFileLayersResponse]: + """List the layers of a file in a volume. + :param volume_name: Volume name (required) :type volume_name: str :param file_name: File name (required) :type file_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(VolumeFileLayersResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._list_volume_file_layers_handler_serialize( + volume_name=volume_name, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "VolumeFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'volume_name', - 'file_name' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def list_volume_file_layers_handler_without_preload_content( + self, + volume_name: Annotated[StrictStr, Field(description="Volume name")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List the layers of a file in a volume. + + + :param volume_name: Volume name (required) + :type volume_name: str + :param file_name: File name (required) + :type file_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_volume_file_layers_handler_serialize( + volume_name=volume_name, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_volume_file_layers_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "VolumeFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['volume_name']: - _path_params['volume_name'] = _params['volume_name'] + def _list_volume_file_layers_handler_serialize( + self, + volume_name, + file_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['file_name']: - _path_params['file_name'] = _params['file_name'] + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if volume_name is not None: + _path_params['volume_name'] = volume_name + if file_name is not None: + _path_params['file_name'] = file_name # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "VolumeFileLayersResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/volumes/{volume_name}/files/{file_name}/layers', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/dataset/volumes/{volume_name}/files/{file_name}/layers', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def list_volumes_handler(self, **kwargs) -> List[Volume]: # noqa: E501 - """Lists available volumes. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_volumes_handler(async_req=True) - >>> result = thread.get() + @validate_call + def list_volumes_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[Volume]: + """Lists available volumes. + - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[Volume] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_volumes_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_volumes_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def list_volumes_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Lists available volumes. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_volumes_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._list_volumes_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[Volume]", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_volumes_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[Volume]]: + """Lists available volumes. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[Volume], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._list_volumes_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[Volume]", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_volumes_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Lists available volumes. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_volumes_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_volumes_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[Volume]", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _list_volumes_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[Volume]", - '401': "ErrorResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/volumes', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/dataset/volumes', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def suggest_meta_data_handler(self, suggest_meta_data : SuggestMetaData, **kwargs) -> MetaDataSuggestion: # noqa: E501 - """Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. # noqa: E501 - Tries to automatically detect the main file and layer name if not specified. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def suggest_meta_data_handler( + self, + suggest_meta_data: SuggestMetaData, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MetaDataSuggestion: + """Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. Tries to automatically detect the main file and layer name if not specified. + + + :param suggest_meta_data: (required) + :type suggest_meta_data: SuggestMetaData + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suggest_meta_data_handler_serialize( + suggest_meta_data=suggest_meta_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataSuggestion", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def suggest_meta_data_handler_with_http_info( + self, + suggest_meta_data: SuggestMetaData, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MetaDataSuggestion]: + """Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. Tries to automatically detect the main file and layer name if not specified. + + + :param suggest_meta_data: (required) + :type suggest_meta_data: SuggestMetaData + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suggest_meta_data_handler_serialize( + suggest_meta_data=suggest_meta_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataSuggestion", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + - >>> thread = api.suggest_meta_data_handler(suggest_meta_data, async_req=True) - >>> result = thread.get() + @validate_call + def suggest_meta_data_handler_without_preload_content( + self, + suggest_meta_data: SuggestMetaData, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. Tries to automatically detect the main file and layer name if not specified. - :param suggest_meta_data: (required) - :type suggest_meta_data: SuggestMetaData - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: MetaDataSuggestion - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the suggest_meta_data_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.suggest_meta_data_handler_with_http_info(suggest_meta_data, **kwargs) # noqa: E501 - - @validate_arguments - def suggest_meta_data_handler_with_http_info(self, suggest_meta_data : SuggestMetaData, **kwargs) -> ApiResponse: # noqa: E501 - """Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. # noqa: E501 - - Tries to automatically detect the main file and layer name if not specified. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.suggest_meta_data_handler_with_http_info(suggest_meta_data, async_req=True) - >>> result = thread.get() :param suggest_meta_data: (required) :type suggest_meta_data: SuggestMetaData - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(MetaDataSuggestion, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 + + _param = self._suggest_meta_data_handler_serialize( + suggest_meta_data=suggest_meta_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'suggest_meta_data' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "MetaDataSuggestion", + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method suggest_meta_data_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _suggest_meta_data_handler_serialize( + self, + suggest_meta_data, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['suggest_meta_data'] is not None: - _body_params = _params['suggest_meta_data'] + if suggest_meta_data is not None: + _body_params = suggest_meta_data + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "MetaDataSuggestion", - '400': "ErrorResponse", - '401': "ErrorResponse", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/suggest', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/dataset/suggest', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, 
files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def update_dataset_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], update_dataset : UpdateDataset, **kwargs) -> None: # noqa: E501 - """Update details about a dataset using the internal name. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def update_dataset_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + update_dataset: UpdateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update details about a dataset using the internal name. - >>> thread = api.update_dataset_handler(dataset, update_dataset, async_req=True) - >>> result = thread.get() :param dataset: Dataset Name (required) :type dataset: str :param update_dataset: (required) :type update_dataset: UpdateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
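# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated patch): asking the backend for a
# metadata suggestion via POST /dataset/suggest.  The payload shape below
# (a data path pointing at an upload id plus a main file) is an assumption
# about the SuggestMetaData schema; consult
# geoengine_openapi_client.models.suggest_meta_data for the real fields.
from geoengine_openapi_client import ApiClient, Configuration
from geoengine_openapi_client.api.datasets_api import DatasetsApi
from geoengine_openapi_client.models import SuggestMetaData

configuration = Configuration(host="http://localhost:3030/api")
configuration.access_token = "<session token>"

with ApiClient(configuration) as api_client:
    datasets_api = DatasetsApi(api_client)
    suggest_request = SuggestMetaData.from_dict({   # hypothetical payload shape
        "dataPath": {"upload": "c1e1c6e9-7f0c-4d62-a572-6b0d923bba0f"},
        "mainFile": "points.csv",
    })
    suggestion = datasets_api.suggest_meta_data_handler(suggest_request)
    print(suggestion)  # MetaDataSuggestion
# ---------------------------------------------------------------------------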
Please call the update_dataset_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_dataset_handler_with_http_info(dataset, update_dataset, **kwargs) # noqa: E501 - - @validate_arguments - def update_dataset_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], update_dataset : UpdateDataset, **kwargs) -> ApiResponse: # noqa: E501 - """Update details about a dataset using the internal name. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_dataset_handler_with_http_info(dataset, update_dataset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_dataset_handler_serialize( + dataset=dataset, + update_dataset=update_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_dataset_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + update_dataset: UpdateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update details about a dataset using the internal name. + :param dataset: Dataset Name (required) :type dataset: str :param update_dataset: (required) :type update_dataset: UpdateDataset - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._update_dataset_handler_serialize( + dataset=dataset, + update_dataset=update_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'dataset', - 'update_dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def update_dataset_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + update_dataset: UpdateDataset, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update details about a dataset using the internal name. + + + :param dataset: Dataset Name (required) + :type dataset: str + :param update_dataset: (required) + :type update_dataset: UpdateDataset + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_dataset_handler_serialize( + dataset=dataset, + update_dataset=update_dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_dataset_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + def _update_dataset_handler_serialize( + self, + dataset, + update_dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['update_dataset'] is not None: - _body_params = _params['update_dataset'] + if update_dataset is not None: + _body_params = update_dataset + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/dataset/{dataset}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - 
_request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def update_dataset_provenance_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], provenances : Provenances, **kwargs) -> None: # noqa: E501 - """update_dataset_provenance_handler # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def update_dataset_provenance_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + provenances: Provenances, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """update_dataset_provenance_handler - >>> thread = api.update_dataset_provenance_handler(dataset, provenances, async_req=True) - >>> result = thread.get() :param dataset: Dataset Name (required) :type dataset: str :param provenances: (required) :type provenances: Provenances - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_dataset_provenance_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_dataset_provenance_handler_with_http_info(dataset, provenances, **kwargs) # noqa: E501 - - @validate_arguments - def update_dataset_provenance_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], provenances : Provenances, **kwargs) -> ApiResponse: # noqa: E501 - """update_dataset_provenance_handler # noqa: E501 - - This method makes a synchronous HTTP request by default. 
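# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated patch): updating a dataset's
# details via POST /dataset/{dataset} and handling the documented 400/401
# ErrorResponse cases.  The UpdateDataset field names are illustrative
# assumptions; check geoengine_openapi_client.models.update_dataset.
from geoengine_openapi_client import ApiClient, Configuration
from geoengine_openapi_client.api.datasets_api import DatasetsApi
from geoengine_openapi_client.exceptions import ApiException
from geoengine_openapi_client.models import UpdateDataset

configuration = Configuration(host="http://localhost:3030/api")
configuration.access_token = "<session token>"

with ApiClient(configuration) as api_client:
    datasets_api = DatasetsApi(api_client)
    update = UpdateDataset.from_dict({              # hypothetical field names
        "name": "ndvi_2024",
        "displayName": "NDVI 2024",
        "description": "Updated via the API",
        "tags": ["raster", "ndvi"],
    })
    try:
        datasets_api.update_dataset_handler(dataset="ndvi", update_dataset=update)
    except ApiException as err:                     # 400/401 map to ErrorResponse
        print(err.status, err.body)
# ---------------------------------------------------------------------------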
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_dataset_provenance_handler_with_http_info(dataset, provenances, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_dataset_provenance_handler_serialize( + dataset=dataset, + provenances=provenances, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_dataset_provenance_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + provenances: Provenances, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """update_dataset_provenance_handler + :param dataset: Dataset Name (required) :type dataset: str :param provenances: (required) :type provenances: Provenances - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_dataset_provenance_handler_serialize( + dataset=dataset, + provenances=provenances, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'dataset', - 'provenances' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def update_dataset_provenance_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + provenances: Provenances, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update_dataset_provenance_handler + + + :param dataset: Dataset Name (required) + :type dataset: str + :param provenances: (required) + :type provenances: Provenances + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
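For orientation, a minimal usage sketch of the regenerated provenance call. The host URL, the session token, and the Provenances payload shape (a list under a "provenances" key) are assumptions made for illustration, and DatasetsApi refers to the surrounding class:

from geoengine_openapi_client import ApiClient, Configuration
from geoengine_openapi_client.api.datasets_api import DatasetsApi
from geoengine_openapi_client.models.provenances import Provenances

configuration = Configuration(host="http://localhost:3030/api")  # placeholder URL
configuration.access_token = "SESSION-TOKEN"  # placeholder for the 'session_token' auth

with ApiClient(configuration) as api_client:
    datasets_api = DatasetsApi(api_client)
    # Assumed payload shape: Provenances wraps a list of provenance entries.
    provenances = Provenances.from_dict({"provenances": []})
    # Returns None on success; non-2xx responses are raised as exceptions
    # carrying the deserialized ErrorResponse body.
    datasets_api.update_dataset_provenance_handler("my_dataset", provenances)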
+ """ # noqa: E501 + + _param = self._update_dataset_provenance_handler_serialize( + dataset=dataset, + provenances=provenances, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_dataset_provenance_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + def _update_dataset_provenance_handler_serialize( + self, + dataset, + provenances, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['provenances'] is not None: - _body_params = _params['provenances'] + if provenances is not None: + _body_params = provenances + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}/provenance', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/dataset/{dataset}/provenance', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - 
_request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def update_dataset_symbology_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], symbology : Symbology, **kwargs) -> None: # noqa: E501 - """Updates the dataset's symbology # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def update_dataset_symbology_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + symbology: Symbology, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Updates the dataset's symbology - >>> thread = api.update_dataset_symbology_handler(dataset, symbology, async_req=True) - >>> result = thread.get() :param dataset: Dataset Name (required) :type dataset: str :param symbology: (required) :type symbology: Symbology - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_dataset_symbology_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_dataset_symbology_handler_with_http_info(dataset, symbology, **kwargs) # noqa: E501 - - @validate_arguments - def update_dataset_symbology_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], symbology : Symbology, **kwargs) -> ApiResponse: # noqa: E501 - """Updates the dataset's symbology # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_dataset_symbology_handler_with_http_info(dataset, symbology, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_dataset_symbology_handler_serialize( + dataset=dataset, + symbology=symbology, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_dataset_symbology_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + symbology: Symbology, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Updates the dataset's symbology + :param dataset: Dataset Name (required) :type dataset: str :param symbology: (required) :type symbology: Symbology - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_dataset_symbology_handler_serialize( + dataset=dataset, + symbology=symbology, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'dataset', - 'symbology' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def update_dataset_symbology_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + symbology: Symbology, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Updates the dataset's symbology + + + :param dataset: Dataset Name (required) + :type dataset: str + :param symbology: (required) + :type symbology: Symbology + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
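The _with_http_info variant issues the same request but returns an ApiResponse wrapper with status code and headers instead of the bare payload. A sketch reusing the datasets_api instance from the provenance example; where the Symbology JSON comes from is left open, since its shape depends on the dataset's result type:

import json

from geoengine_openapi_client.models.symbology import Symbology

# Placeholder: a symbology definition stored as JSON (raster or vector variant,
# see models/symbology.py for the concrete fields).
with open("symbology.json", encoding="utf-8") as f:
    symbology = Symbology.from_dict(json.load(f))

response = datasets_api.update_dataset_symbology_handler_with_http_info(
    "my_dataset", symbology
)
print(response.status_code)  # 200 on success; the response body itself is empty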
+ """ # noqa: E501 + + _param = self._update_dataset_symbology_handler_serialize( + dataset=dataset, + symbology=symbology, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_dataset_symbology_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + def _update_dataset_symbology_handler_serialize( + self, + dataset, + symbology, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['symbology'] is not None: - _body_params = _params['symbology'] + if symbology is not None: + _body_params = symbology + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}/symbology', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/dataset/{dataset}/symbology', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - 
_request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def update_loading_info_handler(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], meta_data_definition : MetaDataDefinition, **kwargs) -> None: # noqa: E501 - """Updates the dataset's loading info # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def update_loading_info_handler( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + meta_data_definition: MetaDataDefinition, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Updates the dataset's loading info - >>> thread = api.update_loading_info_handler(dataset, meta_data_definition, async_req=True) - >>> result = thread.get() :param dataset: Dataset Name (required) :type dataset: str :param meta_data_definition: (required) :type meta_data_definition: MetaDataDefinition - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_loading_info_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_loading_info_handler_with_http_info(dataset, meta_data_definition, **kwargs) # noqa: E501 - - @validate_arguments - def update_loading_info_handler_with_http_info(self, dataset : Annotated[StrictStr, Field(..., description="Dataset Name")], meta_data_definition : MetaDataDefinition, **kwargs) -> ApiResponse: # noqa: E501 - """Updates the dataset's loading info # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_loading_info_handler_with_http_info(dataset, meta_data_definition, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_loading_info_handler_serialize( + dataset=dataset, + meta_data_definition=meta_data_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_loading_info_handler_with_http_info( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + meta_data_definition: MetaDataDefinition, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Updates the dataset's loading info + :param dataset: Dataset Name (required) :type dataset: str :param meta_data_definition: (required) :type meta_data_definition: MetaDataDefinition - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_loading_info_handler_serialize( + dataset=dataset, + meta_data_definition=meta_data_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'dataset', - 'meta_data_definition' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def update_loading_info_handler_without_preload_content( + self, + dataset: Annotated[StrictStr, Field(description="Dataset Name")], + meta_data_definition: MetaDataDefinition, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Updates the dataset's loading info + + + :param dataset: Dataset Name (required) + :type dataset: str + :param meta_data_definition: (required) + :type meta_data_definition: MetaDataDefinition + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
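For callers that want the raw HTTP response, for example to log or forward it, the _without_preload_content variant skips reading and deserializing the body. A sketch, again reusing datasets_api; the loading-info JSON file is a placeholder, and the returned object is assumed to be the urllib3 response used by the generated REST client:

import json

from geoengine_openapi_client.models.meta_data_definition import MetaDataDefinition

with open("loading_info.json", encoding="utf-8") as f:
    meta_data = MetaDataDefinition.from_dict(json.load(f))

raw = datasets_api.update_loading_info_handler_without_preload_content(
    "my_dataset", meta_data
)
print(raw.status)  # the body has not been read or parsed at this point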
+ """ # noqa: E501 + + _param = self._update_loading_info_handler_serialize( + dataset=dataset, + meta_data_definition=meta_data_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_loading_info_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['dataset']: - _path_params['dataset'] = _params['dataset'] + def _update_loading_info_handler_serialize( + self, + dataset, + meta_data_definition, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if dataset is not None: + _path_params['dataset'] = dataset # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['meta_data_definition'] is not None: - _body_params = _params['meta_data_definition'] + if meta_data_definition is not None: + _body_params = meta_data_definition + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/dataset/{dataset}/loadingInfo', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/dataset/{dataset}/loadingInfo', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - 
_preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/general_api.py b/python/geoengine_openapi_client/api/general_api.py index 3d0a0f88..aa468d0c 100644 --- a/python/geoengine_openapi_client/api/general_api.py +++ b/python/geoengine_openapi_client/api/general_api.py @@ -12,21 +12,16 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated from geoengine_openapi_client.models.server_info import ServerInfo -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class GeneralApi: @@ -41,256 +36,479 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def available_handler(self, **kwargs) -> None: # noqa: E501 - """Server availablity check. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def available_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Server availablity check. - >>> thread = api.available_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. 
- If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the available_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.available_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def available_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Server availablity check. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.available_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._available_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def available_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Server availablity check. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 - _params = locals() + _param = self._available_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def available_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Server availablity check. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
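The availability probe takes no parameters and returns nothing on a 204 response; an unreachable server surfaces as an exception from the underlying HTTP client. A sketch with a per-request timeout, assuming the api_client set up in the earlier dataset example:

from geoengine_openapi_client.api.general_api import GeneralApi

general_api = GeneralApi(api_client)
# (connect, read) timeouts in seconds, applied to this call only.
general_api.available_handler(_request_timeout=(3.05, 10.0))
print("Geo Engine instance is reachable")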
+ """ # noqa: E501 + + _param = self._available_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method available_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _available_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = [] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/available', 'GET', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/available', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def server_info_handler(self, **kwargs) -> ServerInfo: # noqa: E501 - """Shows information about the server software version. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.server_info_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
+ @validate_call + def server_info_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ServerInfo: + """Shows information about the server software version. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: ServerInfo - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the server_info_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.server_info_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def server_info_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Shows information about the server software version. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.server_info_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._server_info_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ServerInfo", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def server_info_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ServerInfo]: + """Shows information about the server software version. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(ServerInfo, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._server_info_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "ServerInfo", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def server_info_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Shows information about the server software version. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
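server_info_handler deserializes the /info payload into a ServerInfo model, and its _with_http_info sibling additionally exposes the status code and headers. A sketch reusing general_api from the previous example:

info = general_api.server_info_handler()
print(info)  # ServerInfo model; its fields are defined in models/server_info.py

detailed = general_api.server_info_handler_with_http_info()
print(detailed.status_code)  # 200
print(detailed.data)         # the same ServerInfo instance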
+ """ # noqa: E501 + + _param = self._server_info_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method server_info_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "ServerInfo", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _server_info_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = [] # noqa: E501 - _response_types_map = { - '200': "ServerInfo", - } + # authentication setting + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/info', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/info', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/layers_api.py b/python/geoengine_openapi_client/api/layers_api.py index 1d4668de..df238cd2 100644 --- a/python/geoengine_openapi_client/api/layers_api.py +++ b/python/geoengine_openapi_client/api/layers_api.py @@ -12,21 +12,17 @@ Do not edit the class manually. 
""" # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint +from pydantic import Field, StrictStr from typing import List - -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response +from typing_extensions import Annotated from geoengine_openapi_client.models.add_layer import AddLayer from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.layer import Layer from geoengine_openapi_client.models.layer_collection import LayerCollection from geoengine_openapi_client.models.provider_capabilities import ProviderCapabilities @@ -35,12 +31,9 @@ from geoengine_openapi_client.models.update_layer import UpdateLayer from geoengine_openapi_client.models.update_layer_collection import UpdateLayerCollection -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class LayersApi: @@ -55,601 +48,1135 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def add_collection(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], add_layer_collection : AddLayerCollection, **kwargs) -> AddCollection200Response: # noqa: E501 - """Add a new collection to an existing collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def add_collection( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer_collection: AddLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Add a new collection to an existing collection - >>> thread = api.add_collection(collection, add_layer_collection, async_req=True) - >>> result = thread.get() :param collection: Layer collection id (required) :type collection: str :param add_layer_collection: (required) :type add_layer_collection: AddLayerCollection - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_collection_with_http_info(collection, add_layer_collection, **kwargs) # noqa: E501 - - @validate_arguments - def add_collection_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], add_layer_collection : AddLayerCollection, **kwargs) -> ApiResponse: # noqa: E501 - """Add a new collection to an existing collection # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_collection_with_http_info(collection, add_layer_collection, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_collection_serialize( + collection=collection, + add_layer_collection=add_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_collection_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer_collection: AddLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Add a new collection to an existing collection + :param collection: Layer collection id (required) :type collection: str :param add_layer_collection: (required) :type add_layer_collection: AddLayerCollection - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._add_collection_serialize( + collection=collection, + add_layer_collection=add_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'collection', - 'add_layer_collection' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_collection_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer_collection: AddLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a new collection to an existing collection + + + :param collection: Layer collection id (required) + :type collection: str + :param add_layer_collection: (required) + :type add_layer_collection: AddLayerCollection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_collection_serialize( + collection=collection, + add_layer_collection=add_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + def _add_collection_serialize( + self, + collection, + add_layer_collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['add_layer_collection'] is not None: - _body_params = _params['add_layer_collection'] + if add_layer_collection is not None: + _body_params = add_layer_collection + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "AddCollection200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - 
'/layerDb/collections/{collection}/collections', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/layerDb/collections/{collection}/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def add_existing_collection_to_collection(self, parent : Annotated[StrictStr, Field(..., description="Parent layer collection id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> None: # noqa: E501 - """Add an existing collection to a collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def add_existing_collection_to_collection( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add an existing collection to a collection - >>> thread = api.add_existing_collection_to_collection(parent, collection, async_req=True) - >>> result = thread.get() :param parent: Parent layer collection id (required) :type parent: str :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
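Editor's note, not part of the patch: a usage sketch for the regenerated add_collection. The return type is now InlineObject2 rather than AddCollection200Response; the AddLayerCollection field names below are assumptions about the model and should be checked against the generated class.

    from geoengine_openapi_client.api.layers_api import LayersApi
    from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection

    api = LayersApi()  # 'session_token' auth is assumed to be configured on the default ApiClient
    new_child = AddLayerCollection(
        name="My collection",              # field names assumed; see the generated model
        description="Example child collection",
    )
    result = api.add_collection(collection="parent-collection-uuid",
                                add_layer_collection=new_child)
    # `result` is an InlineObject2 (formerly AddCollection200Response) wrapping the new id.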
- :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_existing_collection_to_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_existing_collection_to_collection_with_http_info(parent, collection, **kwargs) # noqa: E501 - - @validate_arguments - def add_existing_collection_to_collection_with_http_info(self, parent : Annotated[StrictStr, Field(..., description="Parent layer collection id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> ApiResponse: # noqa: E501 - """Add an existing collection to a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_existing_collection_to_collection_with_http_info(parent, collection, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_existing_collection_to_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_existing_collection_to_collection_with_http_info( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Add an existing collection to a collection + :param parent: Parent layer collection id (required) :type parent: str :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. 
:type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._add_existing_collection_to_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'parent', - 'collection' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_existing_collection_to_collection_without_preload_content( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add an existing collection to a collection + + + :param parent: Parent layer collection id (required) + :type parent: str + :param collection: Layer collection id (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_existing_collection_to_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_existing_collection_to_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['parent']: - _path_params['parent'] = _params['parent'] + def _add_existing_collection_to_collection_serialize( + self, + parent, + collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['collection']: - _path_params['collection'] = _params['collection'] + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if parent is not None: + _path_params['parent'] = parent + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/layerDb/collections/{parent}/collections/{collection}', 'POST', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/layerDb/collections/{parent}/collections/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def add_existing_layer_to_collection(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> None: # noqa: E501 - """Add an existing layer to a collection # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_existing_layer_to_collection(collection, layer, async_req=True) - >>> result = thread.get() + @validate_call + def add_existing_layer_to_collection( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add an existing layer to a collection + :param collection: Layer collection id (required) :type collection: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_existing_layer_to_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_existing_layer_to_collection_with_http_info(collection, layer, **kwargs) # noqa: E501 - - @validate_arguments - def add_existing_layer_to_collection_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Add an existing layer to a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_existing_layer_to_collection_with_http_info(collection, layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_existing_layer_to_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_existing_layer_to_collection_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Add an existing layer to a collection + :param collection: Layer collection id (required) :type collection: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._add_existing_layer_to_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'collection', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_existing_layer_to_collection_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add an existing layer to a collection + + + :param collection: Layer collection id (required) + :type collection: str + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_existing_layer_to_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_existing_layer_to_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + def _add_existing_layer_to_collection_serialize( + self, + collection, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None - if _params['layer']: - _path_params['layer'] = _params['layer'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if collection is not None: + _path_params['collection'] = collection + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/layerDb/collections/{collection}/layers/{layer}', 'POST', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/layerDb/collections/{collection}/layers/{layer}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def add_layer(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], add_layer : AddLayer, **kwargs) -> AddCollection200Response: # noqa: E501 - """Add a new layer to a collection # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_layer(collection, add_layer, async_req=True) - >>> result = thread.get() + + @validate_call + def add_layer( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer: AddLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Add a new layer to a collection + :param collection: Layer collection id (required) :type collection: str :param add_layer: (required) :type add_layer: AddLayer - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_layer_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_layer_with_http_info(collection, add_layer, **kwargs) # noqa: E501 - - @validate_arguments - def add_layer_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], add_layer : AddLayer, **kwargs) -> ApiResponse: # noqa: E501 - """Add a new layer to a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_layer_with_http_info(collection, add_layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_layer_serialize( + collection=collection, + add_layer=add_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_layer_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer: AddLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Add a new layer to a collection + :param collection: Layer collection id (required) :type collection: str :param add_layer: (required) :type add_layer: AddLayer - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._add_layer_serialize( + collection=collection, + add_layer=add_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'collection', - 'add_layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_layer_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + add_layer: AddLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a new layer to a collection + + + :param collection: Layer collection id (required) + :type collection: str + :param add_layer: (required) + :type add_layer: AddLayer + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
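Editor's note, not part of the patch: the *_with_http_info variants now return an ApiResponse wrapper instead of a raw tuple, which is useful for inspecting status and headers alongside the parsed body; the same pattern applies to add_layer_with_http_info. A sketch, reusing `api` and `new_child` from the earlier sketch; the ApiResponse attribute names are assumptions based on the generated api_response module:

    resp = api.add_collection_with_http_info(collection="parent-collection-uuid",
                                             add_layer_collection=new_child)
    print(resp.status_code, resp.headers)  # attribute names assumed
    created = resp.data                    # the deserialized InlineObject2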
+ """ # noqa: E501 + + _param = self._add_layer_serialize( + collection=collection, + add_layer=add_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_layer" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + def _add_layer_serialize( + self, + collection, + add_layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['add_layer'] is not None: - _body_params = _params['add_layer'] + if add_layer is not None: + _body_params = add_layer + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "AddCollection200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layerDb/collections/{collection}/layers', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/layerDb/collections/{collection}/layers', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - 
_request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def autocomplete_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], search_type : SearchType, search_string : StrictStr, limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> List[str]: # noqa: E501 - """Autocompletes the search on the contents of the collection of the given provider # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.autocomplete_handler(provider, collection, search_type, search_string, limit, offset, async_req=True) - >>> result = thread.get() + @validate_call + def autocomplete_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[str]: + """Autocompletes the search on the contents of the collection of the given provider + :param provider: Data provider id (required) :type provider: str @@ -663,32 +1190,79 @@ def autocomplete_handler(self, provider : Annotated[StrictStr, Field(..., descri :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[str] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the autocomplete_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.autocomplete_handler_with_http_info(provider, collection, search_type, search_string, limit, offset, **kwargs) # noqa: E501 - - @validate_arguments - def autocomplete_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], search_type : SearchType, search_string : StrictStr, limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Autocompletes the search on the contents of the collection of the given provider # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.autocomplete_handler_with_http_info(provider, collection, search_type, search_string, limit, offset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._autocomplete_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[str]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def autocomplete_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[str]]: + """Autocompletes the search on the contents of the collection of the given provider + :param provider: Data provider id (required) :type provider: str @@ -702,1560 +1276,2943 @@ def autocomplete_handler_with_http_info(self, provider : Annotated[StrictStr, Fi :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
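Editor's note, not part of the patch: a sketch of the autocomplete endpoint, reusing `api` from the earlier sketch. The SearchType import path and the "fulltext" enum value are assumptions; check the generated search_type model for the actual members.

    from geoengine_openapi_client.models.search_type import SearchType

    suggestions = api.autocomplete_handler(
        provider="provider-uuid",
        collection="collection-uuid",
        search_type=SearchType("fulltext"),  # enum value assumed
        search_string="land cover",
        limit=10,
        offset=0,
    )
    print(suggestions)  # List[str] of completion candidates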
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(List[str], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'provider', - 'collection', - 'search_type', - 'search_string', - 'limit', - 'offset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._autocomplete_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method autocomplete_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] - - if _params['collection']: - _path_params['collection'] = _params['collection'] - - - # process the query parameters - _query_params = [] - if _params.get('search_type') is not None: # noqa: E501 - _query_params.append(('searchType', _params['search_type'].value)) - - if _params.get('search_string') is not None: # noqa: E501 - _query_params.append(('searchString', _params['search_string'])) - - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) - - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) - - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { + _response_types_map: Dict[str, Optional[str]] = { '200': "List[str]", } - - return self.api_client.call_api( - '/layers/collections/search/autocomplete/{provider}/{collection}', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + 
_request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + ) - @validate_arguments - def layer_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> Layer: # noqa: E501 - """Retrieves the layer of the given provider # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def autocomplete_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Autocompletes the search on the contents of the collection of the given provider - >>> thread = api.layer_handler(provider, layer, async_req=True) - >>> result = thread.get() :param provider: Data provider id (required) :type provider: str - :param layer: Layer id (required) - :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Layer - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the layer_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.layer_handler_with_http_info(provider, layer, **kwargs) # noqa: E501 - - @validate_arguments - def layer_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the layer of the given provider # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.layer_handler_with_http_info(provider, layer, async_req=True) - >>> result = thread.get() - - :param provider: Data provider id (required) - :type provider: str - :param layer: Layer id (required) - :type layer: str - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + :param collection: Layer collection id (required) + :type collection: str + :param search_type: (required) + :type search_type: SearchType + :param search_string: (required) + :type search_string: str + :param limit: (required) + :type limit: int + :param offset: (required) + :type offset: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(Layer, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'provider', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._autocomplete_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method layer_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[str]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _autocomplete_handler_serialize( + self, + provider, + collection, + search_type, + search_string, + limit, + offset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - _collection_formats = {} + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] - - if _params['layer']: - _path_params['layer'] = _params['layer'] - - + if provider is not None: + _path_params['provider'] = provider + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] + if search_type is not None: + + _query_params.append(('searchType', search_type.value)) + + if search_string is not None: + + _query_params.append(('searchString', search_string)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if offset is not None: + + _query_params.append(('offset', offset)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "Layer", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layers/{provider}/{layer}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/collections/search/autocomplete/{provider}/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - 
response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def layer_to_dataset(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> TaskResponse: # noqa: E501 - """Persist a raster layer from a provider as a dataset. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.layer_to_dataset(provider, layer, async_req=True) - >>> result = thread.get() + @validate_call + def layer_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Layer: + """Retrieves the layer of the given provider + :param provider: Data provider id (required) :type provider: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: TaskResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the layer_to_dataset_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.layer_to_dataset_with_http_info(provider, layer, **kwargs) # noqa: E501 - - @validate_arguments - def layer_to_dataset_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Persist a raster layer from a provider as a dataset. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.layer_to_dataset_with_http_info(provider, layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._layer_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Layer", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def layer_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Layer]: + """Retrieves the layer of the given provider + :param provider: Data provider id (required) :type provider: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(TaskResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._layer_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Layer", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'provider', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def layer_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the layer of the given provider + + + :param provider: Data provider id (required) + :type provider: str + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._layer_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method layer_to_dataset" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Layer", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] + def _layer_handler_serialize( + self, + provider, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['layer']: - _path_params['layer'] = _params['layer'] + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if provider is not None: + _path_params['provider'] = provider + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "TaskResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layers/{provider}/{layer}/dataset', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/{provider}/{layer}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def layer_to_workflow_id_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> AddCollection200Response: # noqa: E501 - """Registers a layer from a provider as a workflow and returns the workflow id # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.layer_to_workflow_id_handler(provider, layer, async_req=True) - >>> result = thread.get() + + @validate_call + def layer_to_dataset( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TaskResponse: + """Persist a raster layer from a provider as a dataset. + :param provider: Data provider id (required) :type provider: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the layer_to_workflow_id_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.layer_to_workflow_id_handler_with_http_info(provider, layer, **kwargs) # noqa: E501 - - @validate_arguments - def layer_to_workflow_id_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Registers a layer from a provider as a workflow and returns the workflow id # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.layer_to_workflow_id_handler_with_http_info(provider, layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._layer_to_dataset_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def layer_to_dataset_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[TaskResponse]: + """Persist a raster layer from a provider as a dataset. + :param provider: Data provider id (required) :type provider: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._layer_to_dataset_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'provider', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def layer_to_dataset_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Persist a raster layer from a provider as a dataset. + + + :param provider: Data provider id (required) + :type provider: str + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._layer_to_dataset_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method layer_to_workflow_id_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] + def _layer_to_dataset_serialize( + self, + provider, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None - if _params['layer']: - _path_params['layer'] = _params['layer'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if provider is not None: + _path_params['provider'] = provider + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "AddCollection200Response", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layers/{provider}/{layer}/workflowId', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/layers/{provider}/{layer}/dataset', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def list_collection_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> LayerCollection: # noqa: E501 - """List the contents of the 
collection of the given provider # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_collection_handler(provider, collection, offset, limit, async_req=True) - >>> result = thread.get() - :param provider: Data provider id (required) - :type provider: str - :param collection: Layer collection id (required) - :type collection: str - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: LayerCollection - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_collection_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_collection_handler_with_http_info(provider, collection, offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def list_collection_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """List the contents of the collection of the given provider # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_collection_handler_with_http_info(provider, collection, offset, limit, async_req=True) - >>> result = thread.get() + @validate_call + def layer_to_workflow_id_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Registers a layer from a provider as a workflow and returns the workflow id + :param provider: Data provider id (required) :type provider: str - :param collection: Layer collection id (required) - :type collection: str - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + :param layer: Layer id (required) + :type layer: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(LayerCollection, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'provider', - 'collection', - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._layer_to_workflow_id_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_collection_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] - - if _params['collection']: - _path_params['collection'] = _params['collection'] - - - # process the query parameters - _query_params = [] - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) - - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def layer_to_workflow_id_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, 
+ _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Registers a layer from a provider as a workflow and returns the workflow id - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + :param provider: Data provider id (required) + :type provider: str + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._layer_to_workflow_id_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _response_types_map = { - '200': "LayerCollection", + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", } - - return self.api_client.call_api( - '/layers/collections/{provider}/{collection}', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def list_root_collections_handler(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> LayerCollection: # noqa: E501 - """List all layer collections # noqa: E501 + ) - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_root_collections_handler(offset, limit, async_req=True) - >>> result = thread.get() + @validate_call + def layer_to_workflow_id_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Registers a layer from a provider as a workflow and returns the workflow id - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: LayerCollection - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_root_collections_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_root_collections_handler_with_http_info(offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def list_root_collections_handler_with_http_info(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """List all layer collections # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_root_collections_handler_with_http_info(offset, limit, async_req=True) - >>> result = thread.get() - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + :param provider: Data provider id (required) + :type provider: str + :param layer: Layer id (required) + :type layer: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. 
:type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(LayerCollection, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 + + _param = self._layer_to_workflow_id_handler_serialize( + provider=provider, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_root_collections_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _layer_to_workflow_id_handler_serialize( + self, + provider, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} + _host = None - # process the query parameters - _query_params = [] - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) + _collection_formats: Dict[str, str] = { + } - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if provider is not None: + _path_params['provider'] = provider + if layer is not None: + _path_params['layer'] = layer + # process the query parameters # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "LayerCollection", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layers/collections', 'GET', - 
_path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/layers/{provider}/{layer}/workflowId', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def provider_capabilities_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], **kwargs) -> ProviderCapabilities: # noqa: E501 - """provider_capabilities_handler # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.provider_capabilities_handler(provider, async_req=True) - >>> result = thread.get() - :param provider: Data provider id (required) - :type provider: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: ProviderCapabilities - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the provider_capabilities_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.provider_capabilities_handler_with_http_info(provider, **kwargs) # noqa: E501 - - @validate_arguments - def provider_capabilities_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], **kwargs) -> ApiResponse: # noqa: E501 - """provider_capabilities_handler # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.provider_capabilities_handler_with_http_info(provider, async_req=True) - >>> result = thread.get() + @validate_call + def list_collection_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> LayerCollection: + """List the contents of the collection of the given provider + :param provider: Data provider id (required) :type provider: str - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + :param collection: Layer collection id (required) + :type collection: str + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(ProviderCapabilities, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'provider' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._list_collection_handler_serialize( + provider=provider, + collection=collection, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method provider_capabilities_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_collection_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[LayerCollection]: + """List the contents of the collection of the given provider + + + :param provider: Data provider id (required) + :type provider: str + :param collection: Layer collection id (required) + :type collection: str + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_collection_handler_serialize( + provider=provider, + collection=collection, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_collection_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List the contents of the collection of the given provider + + + :param provider: Data provider id (required) + :type provider: str + :param collection: Layer collection id (required) + :type collection: str + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_collection_handler_serialize( + provider=provider, + collection=collection, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} + + def _list_collection_handler_serialize( + self, + provider, + collection, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] + if provider is not None: + _path_params['provider'] = provider + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + # process the header parameters + # process the form parameters + # process the body parameter + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/collections/{provider}/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def list_root_collections_handler( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> LayerCollection: + """List all layer collections + + + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
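# --- Hedged usage sketch (editorial addition, not part of the generated patch) ---
# The regenerated client drops `async_req`/`_return_http_data_only`; the plain
# method now returns the deserialized model directly. Host URL, session-token
# handling, and the `LayersApi` class name are assumptions, not taken from this
# hunk of the patch.
import geoengine_openapi_client

configuration = geoengine_openapi_client.Configuration(
    host="http://localhost:3030/api",  # assumed Geo Engine endpoint
)
configuration.access_token = "<session token>"  # assumed: 'session_token' auth uses the bearer token setting

with geoengine_openapi_client.ApiClient(configuration) as api_client:
    layers_api = geoengine_openapi_client.LayersApi(api_client)  # class name assumed
    page = layers_api.list_collection_handler(
        provider="<provider id>",
        collection="<collection id>",
        offset=0,
        limit=20,
    )
    print(page)  # deserialized LayerCollection model
# ---------------------------------------------------------------------------------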
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_root_collections_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_root_collections_handler_with_http_info( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[LayerCollection]: + """List all layer collections + + + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_root_collections_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_root_collections_handler_without_preload_content( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List all layer collections + + + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_root_collections_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_root_collections_handler_serialize( + self, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def provider_capabilities_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ProviderCapabilities: + """provider_capabilities_handler + + + :param provider: Data provider id (required) + :type provider: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._provider_capabilities_handler_serialize( + provider=provider, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ProviderCapabilities", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def provider_capabilities_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ProviderCapabilities]: + """provider_capabilities_handler + + + :param provider: Data provider id (required) + :type provider: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._provider_capabilities_handler_serialize( + provider=provider, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ProviderCapabilities", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + - _response_types_map = { + @validate_call + def provider_capabilities_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """provider_capabilities_handler + + + :param provider: Data provider id (required) + :type provider: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._provider_capabilities_handler_serialize( + provider=provider, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { '200': "ProviderCapabilities", } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _provider_capabilities_handler_serialize( + self, + provider, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - return self.api_client.call_api( - '/layers/{provider}/capabilities', 'GET', - _path_params, - _query_params, - _header_params, + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if provider is not None: + _path_params['provider'] = provider + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/{provider}/capabilities', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def remove_collection(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> None: # noqa: E501 - """Remove a collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_collection(collection, async_req=True) - >>> result = thread.get() + + @validate_call + def remove_collection( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove a collection + :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. 
- If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the remove_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_collection_with_http_info(collection, **kwargs) # noqa: E501 - - @validate_arguments - def remove_collection_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> ApiResponse: # noqa: E501 - """Remove a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_collection_with_http_info(collection, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._remove_collection_serialize( + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_collection_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove a collection + :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._remove_collection_serialize( + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'collection' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def remove_collection_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove a collection + + + :param collection: Layer collection id (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_collection_serialize( + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + def _remove_collection_serialize( + self, + collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/layerDb/collections/{collection}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/layerDb/collections/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def remove_collection_from_collection(self, parent : Annotated[StrictStr, Field(..., description="Parent layer collection id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> None: # noqa: E501 - """Delete a collection from a collection # noqa: E501 - This method makes a synchronous HTTP request by default. 
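# Hedged sketch: `_request_timeout` is now an explicit parameter rather than a
# **kwargs entry; a single float or a (connect, read) tuple is accepted. The
# values below are illustrative only.
layers_api.remove_collection(
    collection="<collection id>",
    _request_timeout=(3.0, 27.0),  # connect/read timeouts in seconds
)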
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_collection_from_collection(parent, collection, async_req=True) - >>> result = thread.get() + @validate_call + def remove_collection_from_collection( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a collection from a collection + :param parent: Parent layer collection id (required) :type parent: str :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the remove_collection_from_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_collection_from_collection_with_http_info(parent, collection, **kwargs) # noqa: E501 - - @validate_arguments - def remove_collection_from_collection_with_http_info(self, parent : Annotated[StrictStr, Field(..., description="Parent layer collection id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], **kwargs) -> ApiResponse: # noqa: E501 - """Delete a collection from a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_collection_from_collection_with_http_info(parent, collection, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._remove_collection_from_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_collection_from_collection_with_http_info( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete a collection from a collection + :param parent: Parent layer collection id (required) :type parent: str :param collection: Layer collection id (required) :type collection: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_collection_from_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_collection_from_collection_without_preload_content( + self, + parent: Annotated[StrictStr, Field(description="Parent layer collection id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a collection from a collection + + + :param parent: Parent layer collection id (required) + :type parent: str + :param collection: Layer collection id (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_collection_from_collection_serialize( + parent=parent, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_collection_from_collection_serialize( + self, + parent, + collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if parent is not None: + _path_params['parent'] = parent + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/layerDb/collections/{parent}/collections/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_layer( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove a collection + + + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_layer_serialize( + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_layer_with_http_info( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove a collection + + + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_layer_serialize( + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_layer_without_preload_content( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove a collection + + + :param layer: Layer id (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - - _params = locals() + """ # noqa: E501 + + _param = self._remove_layer_serialize( + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'parent', - 'collection' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_collection_from_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _remove_layer_serialize( + self, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['parent']: - _path_params['parent'] = _params['parent'] + _host = None - if _params['collection']: - _path_params['collection'] = _params['collection'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/layerDb/collections/{parent}/collections/{collection}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/layerDb/layers/{layer}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, 
body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def remove_layer(self, layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> None: # noqa: E501 - """Remove a collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_layer(layer, async_req=True) - >>> result = thread.get() - :param layer: Layer id (required) - :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the remove_layer_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_layer_with_http_info(layer, **kwargs) # noqa: E501 - - @validate_arguments - def remove_layer_with_http_info(self, layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Remove a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_layer_with_http_info(layer, async_req=True) - >>> result = thread.get() + @validate_call + def remove_layer_from_collection( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove a layer from a collection + + :param collection: Layer collection id (required) + :type collection: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - - _params = locals() - - _all_params = [ - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._remove_layer_from_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_layer" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['layer']: - _path_params['layer'] = _params['layer'] - - - # process the query parameters - _query_params = [] - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} - - return self.api_client.call_api( - '/layerDb/layers/{layer}', 'DELETE', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def remove_layer_from_collection(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> None: # noqa: E501 - """Remove a layer from a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + ).data + + + @validate_call + def remove_layer_from_collection_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove a layer from a collection - >>> thread = api.remove_layer_from_collection(collection, layer, async_req=True) - >>> result = thread.get() :param collection: Layer collection id (required) :type collection: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the remove_layer_from_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_layer_from_collection_with_http_info(collection, layer, **kwargs) # noqa: E501 - - @validate_arguments - def remove_layer_from_collection_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], layer : Annotated[StrictStr, Field(..., description="Layer id")], **kwargs) -> ApiResponse: # noqa: E501 - """Remove a layer from a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_layer_from_collection_with_http_info(collection, layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._remove_layer_from_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_layer_from_collection_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + layer: Annotated[StrictStr, Field(description="Layer id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove a layer from a collection + :param collection: Layer collection id (required) :type collection: str :param layer: Layer id (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
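# Illustrative usage sketch (assumptions noted in the comments; not part of the generated patch).
# Each operation now exists as three synchronous methods instead of one method with
# `async_req`/`_preload_content` kwargs. A minimal sketch, assuming a configured `ApiClient`
# and that the layer endpoints live on a `LayersApi` class (the class name is outside this hunk):
#
#     from geoengine_openapi_client.api_client import ApiClient
#     from geoengine_openapi_client.configuration import Configuration
#     from geoengine_openapi_client.api.layers_api import LayersApi  # class name assumed
#
#     client = ApiClient(Configuration(host="https://example.org/api"))  # placeholder host
#     api = LayersApi(client)
#     api.remove_layer_from_collection("<collection-id>", "<layer-id>")                        # deserialized result (None)
#     resp = api.remove_layer_from_collection_with_http_info("<collection-id>", "<layer-id>")  # ApiResponse[None] with status/headers
#     raw = api.remove_layer_from_collection_without_preload_content("<collection-id>", "<layer-id>")  # raw, unread response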
- :rtype: None - """ - - _params = locals() + """ # noqa: E501 + + _param = self._remove_layer_from_collection_serialize( + collection=collection, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'collection', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_layer_from_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _remove_layer_from_collection_serialize( + self, + collection, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + _host = None - if _params['layer']: - _path_params['layer'] = _params['layer'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if collection is not None: + _path_params['collection'] = collection + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/layerDb/collections/{collection}/layers/{layer}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/layerDb/collections/{collection}/layers/{layer}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def search_handler(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], search_type : SearchType, search_string : StrictStr, limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> 
LayerCollection: # noqa: E501 - """Searches the contents of the collection of the given provider # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_handler(provider, collection, search_type, search_string, limit, offset, async_req=True) - >>> result = thread.get() + @validate_call + def search_handler( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> LayerCollection: + """Searches the contents of the collection of the given provider + :param provider: Data provider id (required) :type provider: str @@ -2269,32 +4226,79 @@ def search_handler(self, provider : Annotated[StrictStr, Field(..., description= :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: LayerCollection - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the search_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.search_handler_with_http_info(provider, collection, search_type, search_string, limit, offset, **kwargs) # noqa: E501 - - @validate_arguments - def search_handler_with_http_info(self, provider : Annotated[StrictStr, Field(..., description="Data provider id")], collection : Annotated[StrictStr, Field(..., description="Layer collection id")], search_type : SearchType, search_string : StrictStr, limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Searches the contents of the collection of the given provider # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.search_handler_with_http_info(provider, collection, search_type, search_string, limit, offset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._search_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_handler_with_http_info( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[LayerCollection]: + """Searches the contents of the collection of the given provider + :param provider: Data provider id (required) :type provider: str @@ -2308,413 +4312,777 @@ def search_handler_with_http_info(self, provider : Annotated[StrictStr, Field(.. :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
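# A minimal search sketch (illustrative; assumes the `api` instance above and an existing
# provider/collection; the `SearchType` enum values come from the OpenAPI spec and are not
# shown in this hunk):
#
#     from geoengine_openapi_client.models.search_type import SearchType  # module path assumed
#
#     collection_result = api.search_handler(
#         provider="<provider-id>",
#         collection="<collection-id>",
#         search_type=SearchType(...),  # one of the spec-defined enum values
#         search_string="forest",
#         limit=10,
#         offset=0,
#     )
#     # `collection_result` is a LayerCollection; `search_handler_with_http_info`
#     # additionally exposes the HTTP status code and headers via ApiResponse.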
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(LayerCollection, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'provider', - 'collection', - 'search_type', - 'search_string', - 'limit', - 'offset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._search_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method search_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['provider']: - _path_params['provider'] = _params['provider'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - if _params['collection']: - _path_params['collection'] = _params['collection'] + @validate_call + def search_handler_without_preload_content( + self, + provider: Annotated[StrictStr, Field(description="Data provider id")], + collection: Annotated[StrictStr, Field(description="Layer collection id")], + search_type: SearchType, + search_string: StrictStr, + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Searches the contents of the collection of the given provider - # process the query parameters - _query_params = [] - if _params.get('search_type') is not None: # noqa: E501 - _query_params.append(('searchType', _params['search_type'].value)) - 
if _params.get('search_string') is not None: # noqa: E501 - _query_params.append(('searchString', _params['search_string'])) + :param provider: Data provider id (required) + :type provider: str + :param collection: Layer collection id (required) + :type collection: str + :param search_type: (required) + :type search_type: SearchType + :param search_string: (required) + :type search_string: str + :param limit: (required) + :type limit: int + :param offset: (required) + :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_handler_serialize( + provider=provider, + collection=collection, + search_type=search_type, + search_string=search_string, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "LayerCollection", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_handler_serialize( + self, + provider, + collection, + search_type, + search_string, + limit, + offset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if provider is not None: + _path_params['provider'] = provider + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if search_type is not None: + + _query_params.append(('searchType', search_type.value)) + + if search_string is not None: + + _query_params.append(('searchString', search_string)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if offset is not None: + + _query_params.append(('offset', offset)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - 
_header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "LayerCollection", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layers/collections/search/{provider}/{collection}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/layers/collections/search/{provider}/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def update_collection(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], update_layer_collection : UpdateLayerCollection, **kwargs) -> None: # noqa: E501 - """Update a collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_collection(collection, update_layer_collection, async_req=True) - >>> result = thread.get() + + @validate_call + def update_collection( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + update_layer_collection: UpdateLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update a collection + :param collection: Layer collection id (required) :type collection: str :param update_layer_collection: (required) :type update_layer_collection: UpdateLayerCollection - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_collection_with_http_info(collection, update_layer_collection, **kwargs) # noqa: E501 - - @validate_arguments - def update_collection_with_http_info(self, collection : Annotated[StrictStr, Field(..., description="Layer collection id")], update_layer_collection : UpdateLayerCollection, **kwargs) -> ApiResponse: # noqa: E501 - """Update a collection # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_collection_with_http_info(collection, update_layer_collection, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_collection_serialize( + collection=collection, + update_layer_collection=update_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_collection_with_http_info( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + update_layer_collection: UpdateLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update a collection + :param collection: Layer collection id (required) :type collection: str :param update_layer_collection: (required) :type update_layer_collection: UpdateLayerCollection - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._update_collection_serialize( + collection=collection, + update_layer_collection=update_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'collection', - 'update_layer_collection' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def update_collection_without_preload_content( + self, + collection: Annotated[StrictStr, Field(description="Layer collection id")], + update_layer_collection: UpdateLayerCollection, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a collection + + + :param collection: Layer collection id (required) + :type collection: str + :param update_layer_collection: (required) + :type update_layer_collection: UpdateLayerCollection + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_collection_serialize( + collection=collection, + update_layer_collection=update_layer_collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_collection" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['collection']: - _path_params['collection'] = _params['collection'] + def _update_collection_serialize( + self, + collection, + update_layer_collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if collection is not None: + _path_params['collection'] = collection # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['update_layer_collection'] is not None: - _body_params = _params['update_layer_collection'] + if update_layer_collection is not None: + _body_params = update_layer_collection + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layerDb/collections/{collection}', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/layerDb/collections/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + 
_request_auth=_request_auth + ) + - @validate_arguments - def update_layer(self, layer : Annotated[StrictStr, Field(..., description="Layer id")], update_layer : UpdateLayer, **kwargs) -> None: # noqa: E501 - """Update a layer # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_layer(layer, update_layer, async_req=True) - >>> result = thread.get() + @validate_call + def update_layer( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + update_layer: UpdateLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update a layer + :param layer: Layer id (required) :type layer: str :param update_layer: (required) :type update_layer: UpdateLayer - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_layer_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_layer_with_http_info(layer, update_layer, **kwargs) # noqa: E501 - - @validate_arguments - def update_layer_with_http_info(self, layer : Annotated[StrictStr, Field(..., description="Layer id")], update_layer : UpdateLayer, **kwargs) -> ApiResponse: # noqa: E501 - """Update a layer # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_layer_with_http_info(layer, update_layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_layer_serialize( + layer=layer, + update_layer=update_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_layer_with_http_info( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + update_layer: UpdateLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update a layer + :param layer: Layer id (required) :type layer: str :param update_layer: (required) :type update_layer: UpdateLayer - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
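# Sketch of a body-carrying update call with an explicit timeout (illustrative;
# `UpdateLayer`/`UpdateLayerCollection` fields are defined by the OpenAPI schema and
# are not shown here):
#
#     from geoengine_openapi_client.models.update_layer import UpdateLayer  # module path assumed
#
#     api.update_layer(
#         layer="<layer-id>",
#         update_layer=UpdateLayer(...),   # fields per the schema
#         _request_timeout=(5.0, 30.0),    # (connection, read) timeouts, as described in the docstring
#     )
#     # `update_collection(collection, update_layer_collection, ...)` follows the same pattern.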
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_layer_serialize( + layer=layer, + update_layer=update_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'layer', - 'update_layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def update_layer_without_preload_content( + self, + layer: Annotated[StrictStr, Field(description="Layer id")], + update_layer: UpdateLayer, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a layer + + + :param layer: Layer id (required) + :type layer: str + :param update_layer: (required) + :type update_layer: UpdateLayer + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_layer_serialize( + layer=layer, + update_layer=update_layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_layer" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['layer']: - _path_params['layer'] = _params['layer'] + def _update_layer_serialize( + self, + layer, + update_layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['update_layer'] is not None: - _body_params = _params['update_layer'] + if update_layer is not None: + _body_params = update_layer + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/layerDb/layers/{layer}', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/layerDb/layers/{layer}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/ml_api.py b/python/geoengine_openapi_client/api/ml_api.py index c0ef41b6..1f9d40dc 100644 --- a/python/geoengine_openapi_client/api/ml_api.py +++ 
b/python/geoengine_openapi_client/api/ml_api.py @@ -12,27 +12,20 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr +from pydantic import Field, StrictStr from typing import List - +from typing_extensions import Annotated from geoengine_openapi_client.models.ml_model import MlModel from geoengine_openapi_client.models.ml_model_name_response import MlModelNameResponse -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class MLApi: @@ -47,415 +40,774 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def add_ml_model(self, ml_model : MlModel, **kwargs) -> MlModelNameResponse: # noqa: E501 - """Create a new ml model. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def add_ml_model( + self, + ml_model: MlModel, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MlModelNameResponse: + """Create a new ml model. - >>> thread = api.add_ml_model(ml_model, async_req=True) - >>> result = thread.get() :param ml_model: (required) :type ml_model: MlModel - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: MlModelNameResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_ml_model_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_ml_model_with_http_info(ml_model, **kwargs) # noqa: E501 - - @validate_arguments - def add_ml_model_with_http_info(self, ml_model : MlModel, **kwargs) -> ApiResponse: # noqa: E501 - """Create a new ml model. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_ml_model_with_http_info(ml_model, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_ml_model_serialize( + ml_model=ml_model, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModelNameResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_ml_model_with_http_info( + self, + ml_model: MlModel, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MlModelNameResponse]: + """Create a new ml model. + :param ml_model: (required) :type ml_model: MlModel - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
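# Minimal sketch for the ML endpoints (illustrative; `MlModel` fields and the exact
# attributes on `MlModelNameResponse` are defined by the schema, not by this hunk):
#
#     from geoengine_openapi_client.api.ml_api import MLApi
#     from geoengine_openapi_client.models.ml_model import MlModel
#
#     ml_api = MLApi(client)                                    # `client` as configured above
#     name_response = ml_api.add_ml_model(MlModel(...))         # -> MlModelNameResponse
#     model = ml_api.get_ml_model(model_name="<model-name>")    # -> MlModel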
- :rtype: tuple(MlModelNameResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._add_ml_model_serialize( + ml_model=ml_model, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModelNameResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'ml_model' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def add_ml_model_without_preload_content( + self, + ml_model: MlModel, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new ml model. + + + :param ml_model: (required) + :type ml_model: MlModel + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_ml_model_serialize( + ml_model=ml_model, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_ml_model" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModelNameResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _add_ml_model_serialize( + self, + ml_model, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['ml_model'] is not None: - _body_params = _params['ml_model'] + if ml_model is not None: + _body_params = ml_model + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "MlModelNameResponse", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/ml/models', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/ml/models', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def get_ml_model(self, model_name : Annotated[StrictStr, Field(..., 
description="Ml Model Name")], **kwargs) -> MlModel: # noqa: E501 - """Get ml model by name. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_ml_model(model_name, async_req=True) - >>> result = thread.get() + + @validate_call + def get_ml_model( + self, + model_name: Annotated[StrictStr, Field(description="Ml Model Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MlModel: + """Get ml model by name. + :param model_name: Ml Model Name (required) :type model_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: MlModel - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_ml_model_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_ml_model_with_http_info(model_name, **kwargs) # noqa: E501 - - @validate_arguments - def get_ml_model_with_http_info(self, model_name : Annotated[StrictStr, Field(..., description="Ml Model Name")], **kwargs) -> ApiResponse: # noqa: E501 - """Get ml model by name. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_ml_model_with_http_info(model_name, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_ml_model_serialize( + model_name=model_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModel", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_ml_model_with_http_info( + self, + model_name: Annotated[StrictStr, Field(description="Ml Model Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MlModel]: + """Get ml model by name. + :param model_name: Ml Model Name (required) :type model_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(MlModel, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._get_ml_model_serialize( + model_name=model_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModel", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'model_name' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def get_ml_model_without_preload_content( + self, + model_name: Annotated[StrictStr, Field(description="Ml Model Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get ml model by name. + + + :param model_name: Ml Model Name (required) + :type model_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_ml_model_serialize( + model_name=model_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_ml_model" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "MlModel", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['model_name']: - _path_params['model_name'] = _params['model_name'] + def _get_ml_model_serialize( + self, + model_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if model_name is not None: + _path_params['model_name'] = model_name # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "MlModel", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/ml/models/{model_name}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/ml/models/{model_name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def list_ml_models(self, **kwargs) -> List[MlModel]: # noqa: E501 - """List ml models. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_ml_models(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. 
It can also be a pair (tuple) of - (connection, read) timeouts. + @validate_call + def list_ml_models( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[MlModel]: + """List ml models. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[MlModel] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_ml_models_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_ml_models_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def list_ml_models_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """List ml models. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_ml_models_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._list_ml_models_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[MlModel]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_ml_models_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[MlModel]]: + """List ml models. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[MlModel], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._list_ml_models_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[MlModel]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_ml_models_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List ml models. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_ml_models_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_ml_models" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[MlModel]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _list_ml_models_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[MlModel]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/ml/models', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/ml/models', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/ogcwcs_api.py b/python/geoengine_openapi_client/api/ogcwcs_api.py index b968e241..562c7306 100644 --- a/python/geoengine_openapi_client/api/ogcwcs_api.py +++ b/python/geoengine_openapi_client/api/ogcwcs_api.py @@ -12,18 +12,14 @@ Do not edit the class manually. 
""" # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictFloat, StrictInt, StrictStr - -from typing import Any, Optional, Union +from pydantic import Field, StrictFloat, StrictInt, StrictStr +from typing import List, Optional, Union +from typing_extensions import Annotated from geoengine_openapi_client.models.describe_coverage_request import DescribeCoverageRequest from geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest from geoengine_openapi_client.models.get_coverage_format import GetCoverageFormat @@ -31,12 +27,9 @@ from geoengine_openapi_client.models.wcs_service import WcsService from geoengine_openapi_client.models.wcs_version import WcsVersion -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class OGCWCSApi: @@ -51,15 +44,29 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def wcs_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], service : WcsService, request : GetCapabilitiesRequest, version : Optional[Any] = None, **kwargs) -> str: # noqa: E501 - """Get WCS Capabilities # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def wcs_capabilities_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WcsService, + request: GetCapabilitiesRequest, + version: Optional[WcsVersion] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get WCS Capabilities - >>> thread = api.wcs_capabilities_handler(workflow, service, request, version, async_req=True) - >>> result = thread.get() :param workflow: Workflow id (required) :type workflow: str @@ -69,32 +76,75 @@ def wcs_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., de :type request: GetCapabilitiesRequest :param version: :type version: WcsVersion - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wcs_capabilities_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wcs_capabilities_handler_with_http_info(workflow, service, request, version, **kwargs) # noqa: E501 - - @validate_arguments - def wcs_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], service : WcsService, request : GetCapabilitiesRequest, version : Optional[Any] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Get WCS Capabilities # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wcs_capabilities_handler_with_http_info(workflow, service, request, version, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wcs_capabilities_handler_serialize( + workflow=workflow, + service=service, + request=request, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wcs_capabilities_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WcsService, + request: GetCapabilitiesRequest, + version: Optional[WcsVersion] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get WCS Capabilities + :param workflow: Workflow id (required) :type workflow: str @@ -104,124 +154,229 @@ def wcs_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr :type request: GetCapabilitiesRequest :param version: :type version: WcsVersion - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'service', - 'request', - 'version' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wcs_capabilities_handler_serialize( + workflow=workflow, + service=service, + request=request, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wcs_capabilities_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + @validate_call + def wcs_capabilities_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WcsService, + request: GetCapabilitiesRequest, + version: Optional[WcsVersion] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WCS Capabilities - # process the path parameters - _path_params = {} - if _params['workflow']: - 
_path_params['workflow'] = _params['workflow'] + :param workflow: Workflow id (required) + :type workflow: str + :param service: (required) + :type service: WcsService + :param request: (required) + :type request: GetCapabilitiesRequest + :param version: + :type version: WcsVersion + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._wcs_capabilities_handler_serialize( + workflow=workflow, + service=service, + request=request, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - # process the query parameters - _query_params = [] - if _params.get('version') is not None: # noqa: E501 - _query_params.append(('version', _params['version'].value)) + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _wcs_capabilities_handler_serialize( + self, + workflow, + service, + request, + version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params.get('service') is not None: # noqa: E501 - _query_params.append(('service', _params['service'].value)) + _host = None - if _params.get('request') is not None: # noqa: E501 - _query_params.append(('request', _params['request'].value)) + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + # process the query parameters + if version is not None: + + _query_params.append(('version', version.value)) + + if service is not None: + + _query_params.append(('service', service.value)) + + if request is not None: + + _query_params.append(('request', request.value)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['text/xml']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/xml' + ] + ) - # authentication setting - _auth_settings = 
['session_token'] # noqa: E501 - _response_types_map = { - '200': "str", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wcs/{workflow}?request=GetCapabilities', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wcs/{workflow}?request=GetCapabilities', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def wcs_describe_coverage_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WcsVersion, service : WcsService, request : DescribeCoverageRequest, identifiers : StrictStr, **kwargs) -> str: # noqa: E501 - """Get WCS Coverage Description # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.wcs_describe_coverage_handler(workflow, version, service, request, identifiers, async_req=True) - >>> result = thread.get() + + @validate_call + def wcs_describe_coverage_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + request: DescribeCoverageRequest, + identifiers: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get WCS Coverage Description + :param workflow: Workflow id (required) :type workflow: str @@ -233,32 +388,77 @@ def wcs_describe_coverage_handler(self, workflow : Annotated[StrictStr, Field(.. :type request: DescribeCoverageRequest :param identifiers: (required) :type identifiers: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wcs_describe_coverage_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wcs_describe_coverage_handler_with_http_info(workflow, version, service, request, identifiers, **kwargs) # noqa: E501 - - @validate_arguments - def wcs_describe_coverage_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WcsVersion, service : WcsService, request : DescribeCoverageRequest, identifiers : StrictStr, **kwargs) -> ApiResponse: # noqa: E501 - """Get WCS Coverage Description # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wcs_describe_coverage_handler_with_http_info(workflow, version, service, request, identifiers, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wcs_describe_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + identifiers=identifiers, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wcs_describe_coverage_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + request: DescribeCoverageRequest, + identifiers: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get WCS Coverage Description + :param workflow: Workflow id (required) :type workflow: str @@ -270,128 +470,248 @@ def wcs_describe_coverage_handler_with_http_info(self, workflow : Annotated[Stri :type request: DescribeCoverageRequest :param identifiers: (required) :type identifiers: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request', - 'identifiers' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wcs_describe_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + identifiers=identifiers, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wcs_describe_coverage_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] + @validate_call + def wcs_describe_coverage_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + request: DescribeCoverageRequest, + identifiers: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WCS Coverage Description - # process the query parameters - _query_params = [] - if _params.get('version') is not None: # noqa: E501 - _query_params.append(('version', _params['version'].value)) + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WcsVersion + :param service: (required) + :type service: 
WcsService + :param request: (required) + :type request: DescribeCoverageRequest + :param identifiers: (required) + :type identifiers: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._wcs_describe_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + identifiers=identifiers, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + - if _params.get('service') is not None: # noqa: E501 - _query_params.append(('service', _params['service'].value)) + def _wcs_describe_coverage_handler_serialize( + self, + workflow, + version, + service, + request, + identifiers, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params.get('request') is not None: # noqa: E501 - _query_params.append(('request', _params['request'].value)) + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('identifiers') is not None: # noqa: E501 - _query_params.append(('identifiers', _params['identifiers'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + # process the query parameters + if version is not None: + + _query_params.append(('version', version.value)) + + if service is not None: + + _query_params.append(('service', service.value)) + + if request is not None: + + _query_params.append(('request', request.value)) + + if identifiers is not None: + + _query_params.append(('identifiers', identifiers)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['text/xml']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/xml' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - 
'200': "str", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wcs/{workflow}?request=DescribeCoverage', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wcs/{workflow}?request=DescribeCoverage', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def wcs_get_coverage_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WcsVersion, service : WcsService, request : GetCoverageRequest, format : GetCoverageFormat, identifier : StrictStr, boundingbox : StrictStr, gridbasecrs : StrictStr, gridorigin : Optional[StrictStr] = None, gridoffsets : Optional[StrictStr] = None, time : Optional[StrictStr] = None, resx : Optional[Union[StrictFloat, StrictInt]] = None, resy : Optional[Union[StrictFloat, StrictInt]] = None, nodatavalue : Optional[Union[StrictFloat, StrictInt]] = None, **kwargs) -> bytearray: # noqa: E501 - """Get WCS Coverage # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def wcs_get_coverage_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + request: GetCoverageRequest, + format: GetCoverageFormat, + identifier: StrictStr, + boundingbox: StrictStr, + gridbasecrs: StrictStr, + gridorigin: Optional[StrictStr] = None, + gridoffsets: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + resx: Optional[Union[StrictFloat, StrictInt]] = None, + resy: Optional[Union[StrictFloat, StrictInt]] = None, + nodatavalue: Optional[Union[StrictFloat, StrictInt]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[int]: + """Get WCS Coverage - >>> thread = api.wcs_get_coverage_handler(workflow, version, service, request, format, identifier, boundingbox, gridbasecrs, gridorigin, gridoffsets, time, resx, resy, nodatavalue, async_req=True) - >>> result = thread.get() :param workflow: Workflow id (required) :type workflow: str @@ -421,32 +741,95 @@ def wcs_get_coverage_handler(self, workflow : Annotated[StrictStr, Field(..., de :type resy: float :param nodatavalue: :type nodatavalue: float - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
+ :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: bytearray - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wcs_get_coverage_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wcs_get_coverage_handler_with_http_info(workflow, version, service, request, format, identifier, boundingbox, gridbasecrs, gridorigin, gridoffsets, time, resx, resy, nodatavalue, **kwargs) # noqa: E501 - - @validate_arguments - def wcs_get_coverage_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WcsVersion, service : WcsService, request : GetCoverageRequest, format : GetCoverageFormat, identifier : StrictStr, boundingbox : StrictStr, gridbasecrs : StrictStr, gridorigin : Optional[StrictStr] = None, gridoffsets : Optional[StrictStr] = None, time : Optional[StrictStr] = None, resx : Optional[Union[StrictFloat, StrictInt]] = None, resy : Optional[Union[StrictFloat, StrictInt]] = None, nodatavalue : Optional[Union[StrictFloat, StrictInt]] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Get WCS Coverage # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wcs_get_coverage_handler_with_http_info(workflow, version, service, request, format, identifier, boundingbox, gridbasecrs, gridorigin, gridoffsets, time, resx, resy, nodatavalue, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wcs_get_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + identifier=identifier, + boundingbox=boundingbox, + gridbasecrs=gridbasecrs, + gridorigin=gridorigin, + gridoffsets=gridoffsets, + time=time, + resx=resx, + resy=resy, + nodatavalue=nodatavalue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wcs_get_coverage_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + request: GetCoverageRequest, + format: GetCoverageFormat, + identifier: StrictStr, + boundingbox: StrictStr, + gridbasecrs: StrictStr, + gridorigin: Optional[StrictStr] = None, + gridoffsets: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + resx: Optional[Union[StrictFloat, StrictInt]] = None, + resy: Optional[Union[StrictFloat, StrictInt]] = None, + nodatavalue: Optional[Union[StrictFloat, StrictInt]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[int]]: + """Get WCS Coverage + :param workflow: Workflow id (required) :type workflow: str @@ -476,151 +859,301 @@ def wcs_get_coverage_handler_with_http_info(self, workflow : Annotated[StrictStr :type resy: float :param nodatavalue: :type nodatavalue: float - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(bytearray, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request', - 'format', - 'identifier', - 'boundingbox', - 'gridbasecrs', - 'gridorigin', - 'gridoffsets', - 'time', - 'resx', - 'resy', - 'nodatavalue' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wcs_get_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + identifier=identifier, + boundingbox=boundingbox, + gridbasecrs=gridbasecrs, + gridorigin=gridorigin, + gridoffsets=gridoffsets, + time=time, + resx=resx, + resy=resy, + nodatavalue=nodatavalue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wcs_get_coverage_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] - - - # process the query parameters - _query_params = [] - if _params.get('version') is not None: # noqa: E501 - _query_params.append(('version', _params['version'].value)) - - if _params.get('service') is not None: # noqa: E501 - _query_params.append(('service', _params['service'].value)) - - if _params.get('request') is not None: # noqa: E501 - _query_params.append(('request', _params['request'].value)) - - if _params.get('format') is not None: # noqa: E501 - _query_params.append(('format', _params['format'].value)) - - if _params.get('identifier') is not None: # noqa: E501 - _query_params.append(('identifier', _params['identifier'])) - - if _params.get('boundingbox') is not None: # noqa: E501 - _query_params.append(('boundingbox', _params['boundingbox'])) - - if _params.get('gridbasecrs') is not None: # noqa: E501 - _query_params.append(('gridbasecrs', _params['gridbasecrs'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - if _params.get('gridorigin') is not None: # noqa: E501 - _query_params.append(('gridorigin', _params['gridorigin'])) - if _params.get('gridoffsets') is not None: # noqa: E501 - _query_params.append(('gridoffsets', _params['gridoffsets'])) + @validate_call + def wcs_get_coverage_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WcsVersion, + service: WcsService, + 
request: GetCoverageRequest, + format: GetCoverageFormat, + identifier: StrictStr, + boundingbox: StrictStr, + gridbasecrs: StrictStr, + gridorigin: Optional[StrictStr] = None, + gridoffsets: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + resx: Optional[Union[StrictFloat, StrictInt]] = None, + resy: Optional[Union[StrictFloat, StrictInt]] = None, + nodatavalue: Optional[Union[StrictFloat, StrictInt]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WCS Coverage - if _params.get('time') is not None: # noqa: E501 - _query_params.append(('time', _params['time'])) - if _params.get('resx') is not None: # noqa: E501 - _query_params.append(('resx', _params['resx'])) + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WcsVersion + :param service: (required) + :type service: WcsService + :param request: (required) + :type request: GetCoverageRequest + :param format: (required) + :type format: GetCoverageFormat + :param identifier: (required) + :type identifier: str + :param boundingbox: (required) + :type boundingbox: str + :param gridbasecrs: (required) + :type gridbasecrs: str + :param gridorigin: + :type gridorigin: str + :param gridoffsets: + :type gridoffsets: str + :param time: + :type time: str + :param resx: + :type resx: float + :param resy: + :type resy: float + :param nodatavalue: + :type nodatavalue: float + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
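For orientation, here is a minimal usage sketch of the regenerated WCS client (an editor's illustration, not part of the patch). It assumes a reachable Geo Engine instance with bearer-style session_token auth; the host, token, workflow id, and the values passed to WcsVersion, WcsService, GetCoverageRequest, and GetCoverageFormat are placeholders that may differ from the actual generated enum members.

    from geoengine_openapi_client.configuration import Configuration
    from geoengine_openapi_client.api_client import ApiClient
    from geoengine_openapi_client.api.ogcwcs_api import OGCWCSApi
    from geoengine_openapi_client.models.wcs_version import WcsVersion
    from geoengine_openapi_client.models.wcs_service import WcsService
    from geoengine_openapi_client.models.get_coverage_request import GetCoverageRequest
    from geoengine_openapi_client.models.get_coverage_format import GetCoverageFormat

    config = Configuration(host="http://localhost:3030/api")   # placeholder host
    config.access_token = "SESSION_TOKEN"                      # assumes bearer-style session_token auth

    with ApiClient(config) as api_client:
        wcs = OGCWCSApi(api_client)
        kwargs = dict(
            workflow="WORKFLOW_ID",                             # placeholder workflow id
            version=WcsVersion("1.1.1"),                        # assumed enum value
            service=WcsService("WCS"),                          # assumed enum value
            request=GetCoverageRequest("GetCoverage"),          # assumed enum value
            format=GetCoverageFormat("image/png"),              # assumed enum value
            identifier="WORKFLOW_ID",
            boundingbox="-90,-180,90,180,urn:ogc:def:crs:EPSG::4326",
            gridbasecrs="urn:ogc:def:crs:EPSG::4326",
            resx=0.1,
            resy=-0.1,
        )
        body = wcs.wcs_get_coverage_handler(**kwargs)                         # deserialized body (List[int])
        info = wcs.wcs_get_coverage_handler_with_http_info(**kwargs)          # ApiResponse: .data, .status_code, .headers
        raw = wcs.wcs_get_coverage_handler_without_preload_content(**kwargs)  # unread HTTP response

The three variants replace the old async_req/_preload_content keyword switches: the plain method returns the deserialized body, the *_with_http_info method wraps it in an ApiResponse, and the *_without_preload_content method hands back the raw response for streaming.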
+ """ # noqa: E501 + + _param = self._wcs_get_coverage_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + identifier=identifier, + boundingbox=boundingbox, + gridbasecrs=gridbasecrs, + gridorigin=gridorigin, + gridoffsets=gridoffsets, + time=time, + resx=resx, + resy=resy, + nodatavalue=nodatavalue, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('resy') is not None: # noqa: E501 - _query_params.append(('resy', _params['resy'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _wcs_get_coverage_handler_serialize( + self, + workflow, + version, + service, + request, + format, + identifier, + boundingbox, + gridbasecrs, + gridorigin, + gridoffsets, + time, + resx, + resy, + nodatavalue, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('nodatavalue') is not None: # noqa: E501 - _query_params.append(('nodatavalue', _params['nodatavalue'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + # process the query parameters + if version is not None: + + _query_params.append(('version', version.value)) + + if service is not None: + + _query_params.append(('service', service.value)) + + if request is not None: + + _query_params.append(('request', request.value)) + + if format is not None: + + _query_params.append(('format', format.value)) + + if identifier is not None: + + _query_params.append(('identifier', identifier)) + + if boundingbox is not None: + + _query_params.append(('boundingbox', boundingbox)) + + if gridbasecrs is not None: + + _query_params.append(('gridbasecrs', gridbasecrs)) + + if gridorigin is not None: + + _query_params.append(('gridorigin', gridorigin)) + + if gridoffsets is not None: + + _query_params.append(('gridoffsets', gridoffsets)) + + if time is not None: + + _query_params.append(('time', time)) + + if resx is not None: + + _query_params.append(('resx', resx)) + + if resy is not None: + + _query_params.append(('resy', resy)) + + if nodatavalue is not None: + + _query_params.append(('nodatavalue', nodatavalue)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['image/png']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'image/png' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "bytearray", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wcs/{workflow}?request=GetCoverage', 'GET', - _path_params, - _query_params, 
- _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wcs/{workflow}?request=GetCoverage', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/ogcwfs_api.py b/python/geoengine_openapi_client/api/ogcwfs_api.py index 08c98e08..5879772f 100644 --- a/python/geoengine_openapi_client/api/ogcwfs_api.py +++ b/python/geoengine_openapi_client/api/ogcwfs_api.py @@ -12,29 +12,23 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint - -from typing import Any, Optional +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.geo_json import GeoJson from geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest from geoengine_openapi_client.models.get_feature_request import GetFeatureRequest from geoengine_openapi_client.models.wfs_service import WfsService +from geoengine_openapi_client.models.wfs_version import WfsVersion -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class OGCWFSApi: @@ -49,15 +43,29 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def wfs_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : Optional[Any], service : WfsService, request : GetCapabilitiesRequest, **kwargs) -> str: # noqa: E501 - """Get WFS Capabilities # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def wfs_capabilities_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WfsVersion], + service: WfsService, + request: GetCapabilitiesRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get WFS Capabilities - >>> thread = api.wfs_capabilities_handler(workflow, version, service, request, async_req=True) - >>> result = thread.get() :param workflow: Workflow id (required) :type workflow: str @@ -67,32 +75,75 @@ def wfs_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., de :type service: WfsService :param request: (required) :type request: GetCapabilitiesRequest - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wfs_capabilities_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wfs_capabilities_handler_with_http_info(workflow, version, service, request, **kwargs) # noqa: E501 - - @validate_arguments - def wfs_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : Optional[Any], service : WfsService, request : GetCapabilitiesRequest, **kwargs) -> ApiResponse: # noqa: E501 - """Get WFS Capabilities # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wfs_capabilities_handler_with_http_info(workflow, version, service, request, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wfs_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wfs_capabilities_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WfsVersion], + service: WfsService, + request: GetCapabilitiesRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get WFS Capabilities + :param workflow: Workflow id (required) :type workflow: str @@ -102,124 +153,233 @@ def wfs_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr :type service: WfsService :param request: (required) :type request: GetCapabilitiesRequest - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wfs_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wfs_capabilities_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + @validate_call + def wfs_capabilities_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WfsVersion], + service: WfsService, + request: GetCapabilitiesRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WFS Capabilities - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] - if _params['version']: - _path_params['version'] = _params['version'] + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WfsVersion + :param service: (required) + :type service: WfsService + :param request: (required) + :type request: GetCapabilitiesRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
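The WFS endpoints follow the same three-variant pattern. A minimal sketch (not part of the patch) that reuses the api_client configured in the WCS sketch above; the enum values are again assumptions:

    from geoengine_openapi_client.api.ogcwfs_api import OGCWFSApi
    from geoengine_openapi_client.models.wfs_version import WfsVersion
    from geoengine_openapi_client.models.wfs_service import WfsService
    from geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest

    wfs = OGCWFSApi(api_client)
    capabilities_xml = wfs.wfs_capabilities_handler(
        workflow="WORKFLOW_ID",                              # placeholder workflow id
        version=WfsVersion("2.0.0"),                         # assumed enum value
        service=WfsService("WFS"),                           # assumed enum value
        request=GetCapabilitiesRequest("GetCapabilities"),   # assumed enum value
    )
    # wfs_capabilities_handler_with_http_info(...) additionally exposes status code and headers;
    # wfs_capabilities_handler_without_preload_content(...) returns the unread HTTP response.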
+ """ # noqa: E501 + + _param = self._wfs_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + - if _params['service']: - _path_params['service'] = _params['service'] + def _wfs_capabilities_handler_serialize( + self, + workflow, + version, + service, + request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['request']: - _path_params['request'] = _params['request'] + _host = None + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + if version is not None: + _path_params['version'] = version.value + if service is not None: + _path_params['service'] = service.value + if request is not None: + _path_params['request'] = request.value # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['text/xml']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/xml' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "str", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wfs/{workflow}?request=GetCapabilities', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wfs/{workflow}?request=GetCapabilities', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def wfs_feature_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], service : WfsService, request : GetFeatureRequest, type_names : StrictStr, bbox : StrictStr, version : Optional[Any] = None, time : Optional[StrictStr] = None, srs_name : Optional[StrictStr] = None, namespaces : Optional[StrictStr] = None, count : Optional[conint(strict=True, ge=0)] = None, sort_by : Optional[StrictStr] = None, result_type : Optional[StrictStr] = None, filter : 
Optional[StrictStr] = None, property_name : Optional[StrictStr] = None, query_resolution : Annotated[Optional[Any], Field(description="Vendor parameter for specifying a spatial query resolution")] = None, **kwargs) -> GeoJson: # noqa: E501 - """Get WCS Features # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.wfs_feature_handler(workflow, service, request, type_names, bbox, version, time, srs_name, namespaces, count, sort_by, result_type, filter, property_name, query_resolution, async_req=True) - >>> result = thread.get() + @validate_call + def wfs_feature_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WfsService, + request: GetFeatureRequest, + type_names: StrictStr, + bbox: StrictStr, + version: Optional[WfsVersion] = None, + time: Optional[StrictStr] = None, + srs_name: Optional[StrictStr] = None, + namespaces: Optional[StrictStr] = None, + count: Optional[Annotated[int, Field(strict=True, ge=0)]] = None, + sort_by: Optional[StrictStr] = None, + result_type: Optional[StrictStr] = None, + filter: Optional[StrictStr] = None, + property_name: Optional[StrictStr] = None, + query_resolution: Annotated[Optional[StrictStr], Field(description="Vendor parameter for specifying a spatial query resolution")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GeoJson: + """Get WCS Features + :param workflow: Workflow id (required) :type workflow: str @@ -251,32 +411,97 @@ def wfs_feature_handler(self, workflow : Annotated[StrictStr, Field(..., descrip :type property_name: str :param query_resolution: Vendor parameter for specifying a spatial query resolution :type query_resolution: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: GeoJson - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the wfs_feature_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wfs_feature_handler_with_http_info(workflow, service, request, type_names, bbox, version, time, srs_name, namespaces, count, sort_by, result_type, filter, property_name, query_resolution, **kwargs) # noqa: E501 - - @validate_arguments - def wfs_feature_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], service : WfsService, request : GetFeatureRequest, type_names : StrictStr, bbox : StrictStr, version : Optional[Any] = None, time : Optional[StrictStr] = None, srs_name : Optional[StrictStr] = None, namespaces : Optional[StrictStr] = None, count : Optional[conint(strict=True, ge=0)] = None, sort_by : Optional[StrictStr] = None, result_type : Optional[StrictStr] = None, filter : Optional[StrictStr] = None, property_name : Optional[StrictStr] = None, query_resolution : Annotated[Optional[Any], Field(description="Vendor parameter for specifying a spatial query resolution")] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Get WCS Features # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wfs_feature_handler_with_http_info(workflow, service, request, type_names, bbox, version, time, srs_name, namespaces, count, sort_by, result_type, filter, property_name, query_resolution, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wfs_feature_handler_serialize( + workflow=workflow, + service=service, + request=request, + type_names=type_names, + bbox=bbox, + version=version, + time=time, + srs_name=srs_name, + namespaces=namespaces, + count=count, + sort_by=sort_by, + result_type=result_type, + filter=filter, + property_name=property_name, + query_resolution=query_resolution, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GeoJson", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wfs_feature_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WfsService, + request: GetFeatureRequest, + type_names: StrictStr, + bbox: StrictStr, + version: Optional[WfsVersion] = None, + time: Optional[StrictStr] = None, + srs_name: Optional[StrictStr] = None, + namespaces: Optional[StrictStr] = None, + count: Optional[Annotated[int, Field(strict=True, ge=0)]] = None, + sort_by: Optional[StrictStr] = None, + result_type: Optional[StrictStr] = None, + filter: Optional[StrictStr] = None, + property_name: Optional[StrictStr] = None, + query_resolution: Annotated[Optional[StrictStr], Field(description="Vendor parameter for specifying a spatial query resolution")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, 
Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GeoJson]: + """Get WCS Features + :param workflow: Workflow id (required) :type workflow: str @@ -308,155 +533,311 @@ def wfs_feature_handler_with_http_info(self, workflow : Annotated[StrictStr, Fie :type property_name: str :param query_resolution: Vendor parameter for specifying a spatial query resolution :type query_resolution: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(GeoJson, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'service', - 'request', - 'type_names', - 'bbox', - 'version', - 'time', - 'srs_name', - 'namespaces', - 'count', - 'sort_by', - 'result_type', - 'filter', - 'property_name', - 'query_resolution' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wfs_feature_handler_serialize( + workflow=workflow, + service=service, + request=request, + type_names=type_names, + bbox=bbox, + version=version, + time=time, + srs_name=srs_name, + namespaces=namespaces, + count=count, + sort_by=sort_by, + result_type=result_type, + filter=filter, + property_name=property_name, + query_resolution=query_resolution, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wfs_feature_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] - - - # process the query parameters - _query_params = [] - if _params.get('version') is not None: # noqa: E501 - _query_params.append(('version', _params['version'].value)) - - if _params.get('service') is not None: # noqa: E501 - _query_params.append(('service', _params['service'].value)) - - if _params.get('request') is not None: # noqa: E501 - _query_params.append(('request', _params['request'].value)) - - if _params.get('type_names') is not None: # noqa: E501 - _query_params.append(('typeNames', _params['type_names'])) - - if _params.get('bbox') is not None: # noqa: E501 - _query_params.append(('bbox', _params['bbox'])) - - if _params.get('time') is not None: # noqa: E501 - _query_params.append(('time', _params['time'])) - - if _params.get('srs_name') is not None: # noqa: E501 - _query_params.append(('srsName', _params['srs_name'])) - - if _params.get('namespaces') is not None: # noqa: E501 - _query_params.append(('namespaces', _params['namespaces'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "GeoJson", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - if _params.get('count') is not None: # noqa: E501 - _query_params.append(('count', _params['count'])) - if _params.get('sort_by') is not None: # noqa: E501 - _query_params.append(('sortBy', _params['sort_by'])) + @validate_call + def wfs_feature_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + service: WfsService, + request: GetFeatureRequest, + type_names: StrictStr, + bbox: StrictStr, + version: Optional[WfsVersion] = None, + time: Optional[StrictStr] = None, + srs_name: Optional[StrictStr] = None, + namespaces: Optional[StrictStr] = None, + count: Optional[Annotated[int, Field(strict=True, ge=0)]] = None, + sort_by: Optional[StrictStr] = None, + result_type: Optional[StrictStr] = None, + filter: Optional[StrictStr] = None, + property_name: Optional[StrictStr] = 
None, + query_resolution: Annotated[Optional[StrictStr], Field(description="Vendor parameter for specifying a spatial query resolution")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WCS Features - if _params.get('result_type') is not None: # noqa: E501 - _query_params.append(('resultType', _params['result_type'])) - if _params.get('filter') is not None: # noqa: E501 - _query_params.append(('filter', _params['filter'])) + :param workflow: Workflow id (required) + :type workflow: str + :param service: (required) + :type service: WfsService + :param request: (required) + :type request: GetFeatureRequest + :param type_names: (required) + :type type_names: str + :param bbox: (required) + :type bbox: str + :param version: + :type version: WfsVersion + :param time: + :type time: str + :param srs_name: + :type srs_name: str + :param namespaces: + :type namespaces: str + :param count: + :type count: int + :param sort_by: + :type sort_by: str + :param result_type: + :type result_type: str + :param filter: + :type filter: str + :param property_name: + :type property_name: str + :param query_resolution: Vendor parameter for specifying a spatial query resolution + :type query_resolution: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
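A matching sketch for GetFeature (not part of the patch), continuing with the wfs instance from the sketch above. The deserialized result is the generated GeoJson model; all query values below are placeholders and the query_resolution format is an assumption:

    from geoengine_openapi_client.models.get_feature_request import GetFeatureRequest
    from geoengine_openapi_client.models.wfs_service import WfsService

    features = wfs.wfs_feature_handler(
        workflow="WORKFLOW_ID",                              # placeholder workflow id
        service=WfsService("WFS"),                           # assumed enum value
        request=GetFeatureRequest("GetFeature"),             # assumed enum value
        type_names="WORKFLOW_ID",
        bbox="-90,-180,90,180",
        time="2014-04-01T12:00:00.000Z",                     # optional temporal filter
        srs_name="EPSG:4326",
        query_resolution="0.1,0.1",                          # vendor parameter; format assumed
    )
    print(features.to_dict())                                # GeoJson model deserialized from the response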
+ """ # noqa: E501 + + _param = self._wfs_feature_handler_serialize( + workflow=workflow, + service=service, + request=request, + type_names=type_names, + bbox=bbox, + version=version, + time=time, + srs_name=srs_name, + namespaces=namespaces, + count=count, + sort_by=sort_by, + result_type=result_type, + filter=filter, + property_name=property_name, + query_resolution=query_resolution, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('property_name') is not None: # noqa: E501 - _query_params.append(('propertyName', _params['property_name'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "GeoJson", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _wfs_feature_handler_serialize( + self, + workflow, + service, + request, + type_names, + bbox, + version, + time, + srs_name, + namespaces, + count, + sort_by, + result_type, + filter, + property_name, + query_resolution, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('query_resolution') is not None: # noqa: E501 - _query_params.append(('queryResolution', _params['query_resolution'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + # process the query parameters + if version is not None: + + _query_params.append(('version', version.value)) + + if service is not None: + + _query_params.append(('service', service.value)) + + if request is not None: + + _query_params.append(('request', request.value)) + + if type_names is not None: + + _query_params.append(('typeNames', type_names)) + + if bbox is not None: + + _query_params.append(('bbox', bbox)) + + if time is not None: + + _query_params.append(('time', time)) + + if srs_name is not None: + + _query_params.append(('srsName', srs_name)) + + if namespaces is not None: + + _query_params.append(('namespaces', namespaces)) + + if count is not None: + + _query_params.append(('count', count)) + + if sort_by is not None: + + _query_params.append(('sortBy', sort_by)) + + if result_type is not None: + + _query_params.append(('resultType', result_type)) + + if filter is not None: + + _query_params.append(('filter', filter)) + + if property_name is not None: + + _query_params.append(('propertyName', property_name)) + + if query_resolution is not None: + + _query_params.append(('queryResolution', query_resolution)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "GeoJson", - } + # authentication setting + 
_auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wfs/{workflow}?request=GetFeature', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wfs/{workflow}?request=GetFeature', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/ogcwms_api.py b/python/geoengine_openapi_client/api/ogcwms_api.py index 5c2e343a..c261b889 100644 --- a/python/geoengine_openapi_client/api/ogcwms_api.py +++ b/python/geoengine_openapi_client/api/ogcwms_api.py @@ -12,31 +12,26 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictBool, StrictStr, conint - -from typing import Any, Optional, Union +from pydantic import Field, StrictBool, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from geoengine_openapi_client.models.get_capabilities_format import GetCapabilitiesFormat from geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest from geoengine_openapi_client.models.get_legend_graphic_request import GetLegendGraphicRequest +from geoengine_openapi_client.models.get_map_exception_format import GetMapExceptionFormat from geoengine_openapi_client.models.get_map_format import GetMapFormat from geoengine_openapi_client.models.get_map_request import GetMapRequest from geoengine_openapi_client.models.wms_service import WmsService from geoengine_openapi_client.models.wms_version import WmsVersion -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class OGCWMSApi: @@ -51,15 +46,30 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def wms_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : Optional[Any], service : WmsService, request : GetCapabilitiesRequest, format : Optional[Any], **kwargs) -> str: # noqa: E501 - """Get WMS Capabilities # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def wms_capabilities_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WmsVersion], + service: WmsService, + request: GetCapabilitiesRequest, + format: Optional[GetCapabilitiesFormat], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get WMS Capabilities - >>> thread = api.wms_capabilities_handler(workflow, version, service, request, format, async_req=True) - >>> result = thread.get() :param workflow: Workflow id (required) :type workflow: str @@ -71,32 +81,77 @@ def wms_capabilities_handler(self, workflow : Annotated[StrictStr, Field(..., de :type request: GetCapabilitiesRequest :param format: (required) :type format: GetCapabilitiesFormat - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wms_capabilities_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wms_capabilities_handler_with_http_info(workflow, version, service, request, format, **kwargs) # noqa: E501 - - @validate_arguments - def wms_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : Optional[Any], service : WmsService, request : GetCapabilitiesRequest, format : Optional[Any], **kwargs) -> ApiResponse: # noqa: E501 - """Get WMS Capabilities # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wms_capabilities_handler_with_http_info(workflow, version, service, request, format, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wms_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wms_capabilities_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WmsVersion], + service: WmsService, + request: GetCapabilitiesRequest, + format: Optional[GetCapabilitiesFormat], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get WMS Capabilities + :param workflow: Workflow id (required) :type workflow: str @@ -108,128 +163,231 @@ def wms_capabilities_handler_with_http_info(self, workflow : Annotated[StrictStr :type request: GetCapabilitiesRequest :param format: (required) :type format: GetCapabilitiesFormat - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request', - 'format' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wms_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wms_capabilities_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] + @validate_call + def wms_capabilities_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: Optional[WmsVersion], + service: WmsService, + request: GetCapabilitiesRequest, + format: Optional[GetCapabilitiesFormat], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WMS Capabilities - if _params['version']: - _path_params['version'] = _params['version'] - if _params['service']: - _path_params['service'] = _params['service'] + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WmsVersion + :param service: (required) + :type service: WmsService + :param request: (required) + :type request: GetCapabilitiesRequest + :param format: (required) + :type format: GetCapabilitiesFormat + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._wms_capabilities_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + format=format, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params['request']: - _path_params['request'] = _params['request'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _wms_capabilities_handler_serialize( + self, + workflow, + version, + service, + request, + format, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['format']: - _path_params['format'] = _params['format'] + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + if version is not None: + _path_params['version'] = version.value + if service is not None: + _path_params['service'] = service.value + if request is not None: + _path_params['request'] = request.value + if format is not None: + _path_params['format'] = format.value # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['text/xml']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/xml' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "str", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wms/{workflow}?request=GetCapabilities', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wms/{workflow}?request=GetCapabilities', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def wms_legend_graphic_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WmsVersion, service : WmsService, request : GetLegendGraphicRequest, layer : StrictStr, **kwargs) -> None: # noqa: E501 - """Get WMS Legend Graphic # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.wms_legend_graphic_handler(workflow, version, service, request, layer, async_req=True) - >>> result = thread.get() + + @validate_call + def wms_legend_graphic_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetLegendGraphicRequest, + layer: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Get WMS Legend Graphic + :param workflow: Workflow id (required) :type workflow: str @@ -241,32 +399,77 @@ def wms_legend_graphic_handler(self, workflow : Annotated[StrictStr, Field(..., :type request: GetLegendGraphicRequest :param layer: (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the wms_legend_graphic_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wms_legend_graphic_handler_with_http_info(workflow, version, service, request, layer, **kwargs) # noqa: E501 - - @validate_arguments - def wms_legend_graphic_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WmsVersion, service : WmsService, request : GetLegendGraphicRequest, layer : StrictStr, **kwargs) -> ApiResponse: # noqa: E501 - """Get WMS Legend Graphic # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wms_legend_graphic_handler_with_http_info(workflow, version, service, request, layer, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wms_legend_graphic_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '501': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wms_legend_graphic_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetLegendGraphicRequest, + layer: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Get WMS Legend Graphic + :param workflow: Workflow id (required) :type workflow: str @@ -278,122 +481,237 @@ def wms_legend_graphic_handler_with_http_info(self, workflow : Annotated[StrictS :type request: GetLegendGraphicRequest :param layer: (required) :type layer: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request', - 'layer' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wms_legend_graphic_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wms_legend_graphic_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '501': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] + @validate_call + def wms_legend_graphic_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetLegendGraphicRequest, + layer: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WMS Legend Graphic - if _params['version']: - _path_params['version'] = _params['version'] - if _params['service']: - _path_params['service'] = _params['service'] + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WmsVersion + :param service: (required) + :type service: WmsService + :param request: (required) + :type request: GetLegendGraphicRequest + :param layer: (required) + :type layer: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._wms_legend_graphic_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + layer=layer, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '501': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - if _params['request']: - _path_params['request'] = _params['request'] - if _params['layer']: - _path_params['layer'] = _params['layer'] + def _wms_legend_graphic_handler_serialize( + self, + workflow, + version, + service, + request, + layer, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + if version is not None: + _path_params['version'] = version.value + if service is not None: + _path_params['service'] = service.value + if request is not None: + _path_params['request'] = request.value + if layer is not None: + _path_params['layer'] = layer # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/wms/{workflow}?request=GetLegendGraphic', 'GET', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/wms/{workflow}?request=GetLegendGraphic', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def wms_map_handler(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WmsVersion, service : WmsService, request : GetMapRequest, width : conint(strict=True, ge=0), height : conint(strict=True, ge=0), bbox : StrictStr, format : GetMapFormat, layers : StrictStr, styles : StrictStr, crs : Optional[StrictStr] = None, time : Optional[StrictStr] = None, transparent : Optional[StrictBool] = None, bgcolor : Optional[StrictStr] = None, sld : Optional[StrictStr] = None, sld_body : Optional[StrictStr] = None, elevation : Optional[StrictStr] = None, exceptions : Optional[Any] = None, **kwargs) -> bytearray: # noqa: E501 - """Get WMS Map # noqa: E501 - This 
method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.wms_map_handler(workflow, version, service, request, width, height, bbox, format, layers, styles, crs, time, transparent, bgcolor, sld, sld_body, elevation, exceptions, async_req=True) - >>> result = thread.get() + + @validate_call + def wms_map_handler( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetMapRequest, + width: Annotated[int, Field(strict=True, ge=0)], + height: Annotated[int, Field(strict=True, ge=0)], + bbox: StrictStr, + format: GetMapFormat, + layers: StrictStr, + styles: StrictStr, + crs: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + transparent: Optional[StrictBool] = None, + bgcolor: Optional[StrictStr] = None, + sld: Optional[StrictStr] = None, + sld_body: Optional[StrictStr] = None, + elevation: Optional[StrictStr] = None, + exceptions: Optional[GetMapExceptionFormat] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[int]: + """Get WMS Map + :param workflow: Workflow id (required) :type workflow: str @@ -431,32 +749,103 @@ def wms_map_handler(self, workflow : Annotated[StrictStr, Field(..., description :type elevation: str :param exceptions: :type exceptions: GetMapExceptionFormat - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: bytearray - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the wms_map_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.wms_map_handler_with_http_info(workflow, version, service, request, width, height, bbox, format, layers, styles, crs, time, transparent, bgcolor, sld, sld_body, elevation, exceptions, **kwargs) # noqa: E501 - - @validate_arguments - def wms_map_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(..., description="Workflow id")], version : WmsVersion, service : WmsService, request : GetMapRequest, width : conint(strict=True, ge=0), height : conint(strict=True, ge=0), bbox : StrictStr, format : GetMapFormat, layers : StrictStr, styles : StrictStr, crs : Optional[StrictStr] = None, time : Optional[StrictStr] = None, transparent : Optional[StrictBool] = None, bgcolor : Optional[StrictStr] = None, sld : Optional[StrictStr] = None, sld_body : Optional[StrictStr] = None, elevation : Optional[StrictStr] = None, exceptions : Optional[Any] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Get WMS Map # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.wms_map_handler_with_http_info(workflow, version, service, request, width, height, bbox, format, layers, styles, crs, time, transparent, bgcolor, sld, sld_body, elevation, exceptions, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._wms_map_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + width=width, + height=height, + bbox=bbox, + format=format, + layers=layers, + styles=styles, + crs=crs, + time=time, + transparent=transparent, + bgcolor=bgcolor, + sld=sld, + sld_body=sld_body, + elevation=elevation, + exceptions=exceptions, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def wms_map_handler_with_http_info( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetMapRequest, + width: Annotated[int, Field(strict=True, ge=0)], + height: Annotated[int, Field(strict=True, ge=0)], + bbox: StrictStr, + format: GetMapFormat, + layers: StrictStr, + styles: StrictStr, + crs: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + transparent: Optional[StrictBool] = None, + bgcolor: Optional[StrictStr] = None, + sld: Optional[StrictStr] = None, + sld_body: Optional[StrictStr] = None, + elevation: Optional[StrictStr] = None, + exceptions: Optional[GetMapExceptionFormat] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[int]]: + """Get WMS Map + :param workflow: Workflow id (required) :type workflow: str @@ 
-494,167 +883,341 @@ def wms_map_handler_with_http_info(self, workflow : Annotated[StrictStr, Field(. :type elevation: str :param exceptions: :type exceptions: GetMapExceptionFormat - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(bytearray, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'workflow', - 'version', - 'service', - 'request', - 'width', - 'height', - 'bbox', - 'format', - 'layers', - 'styles', - 'crs', - 'time', - 'transparent', - 'bgcolor', - 'sld', - 'sld_body', - 'elevation', - 'exceptions' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._wms_map_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + width=width, + height=height, + bbox=bbox, + format=format, + layers=layers, + styles=styles, + crs=crs, + time=time, + transparent=transparent, + bgcolor=bgcolor, + sld=sld, + sld_body=sld_body, + elevation=elevation, + exceptions=exceptions, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method wms_map_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['workflow']: - _path_params['workflow'] = _params['workflow'] - - - # process the query parameters - _query_params = [] - if _params.get('version') is not None: # noqa: E501 - _query_params.append(('version', _params['version'].value)) - - if _params.get('service') is not None: # noqa: E501 - _query_params.append(('service', _params['service'].value)) - - if _params.get('request') is not None: # noqa: E501 - _query_params.append(('request', _params['request'].value)) - - if _params.get('width') is not None: # noqa: E501 - _query_params.append(('width', _params['width'])) - - if _params.get('height') is not None: # noqa: E501 - _query_params.append(('height', _params['height'])) - - if _params.get('bbox') is not None: # noqa: E501 - _query_params.append(('bbox', _params['bbox'])) - - if _params.get('format') is not None: # noqa: E501 - _query_params.append(('format', _params['format'].value)) - - if _params.get('layers') is not None: # noqa: E501 - _query_params.append(('layers', _params['layers'])) - - if _params.get('crs') is not None: # noqa: E501 - _query_params.append(('crs', _params['crs'])) - - if _params.get('styles') is not None: # noqa: E501 - _query_params.append(('styles', _params['styles'])) - - if _params.get('time') is not None: # noqa: E501 - _query_params.append(('time', _params['time'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - if _params.get('transparent') is not None: # noqa: E501 - _query_params.append(('transparent', _params['transparent'])) - if _params.get('bgcolor') is not None: # noqa: E501 - _query_params.append(('bgcolor', _params['bgcolor'])) + @validate_call + def wms_map_handler_without_preload_content( + self, + workflow: Annotated[StrictStr, Field(description="Workflow id")], + version: WmsVersion, + service: WmsService, + request: GetMapRequest, + width: Annotated[int, Field(strict=True, ge=0)], + height: Annotated[int, Field(strict=True, 
ge=0)], + bbox: StrictStr, + format: GetMapFormat, + layers: StrictStr, + styles: StrictStr, + crs: Optional[StrictStr] = None, + time: Optional[StrictStr] = None, + transparent: Optional[StrictBool] = None, + bgcolor: Optional[StrictStr] = None, + sld: Optional[StrictStr] = None, + sld_body: Optional[StrictStr] = None, + elevation: Optional[StrictStr] = None, + exceptions: Optional[GetMapExceptionFormat] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get WMS Map - if _params.get('sld') is not None: # noqa: E501 - _query_params.append(('sld', _params['sld'])) - if _params.get('sld_body') is not None: # noqa: E501 - _query_params.append(('sld_body', _params['sld_body'])) + :param workflow: Workflow id (required) + :type workflow: str + :param version: (required) + :type version: WmsVersion + :param service: (required) + :type service: WmsService + :param request: (required) + :type request: GetMapRequest + :param width: (required) + :type width: int + :param height: (required) + :type height: int + :param bbox: (required) + :type bbox: str + :param format: (required) + :type format: GetMapFormat + :param layers: (required) + :type layers: str + :param styles: (required) + :type styles: str + :param crs: + :type crs: str + :param time: + :type time: str + :param transparent: + :type transparent: bool + :param bgcolor: + :type bgcolor: str + :param sld: + :type sld: str + :param sld_body: + :type sld_body: str + :param elevation: + :type elevation: str + :param exceptions: + :type exceptions: GetMapExceptionFormat + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._wms_map_handler_serialize( + workflow=workflow, + version=version, + service=service, + request=request, + width=width, + height=height, + bbox=bbox, + format=format, + layers=layers, + styles=styles, + crs=crs, + time=time, + transparent=transparent, + bgcolor=bgcolor, + sld=sld, + sld_body=sld_body, + elevation=elevation, + exceptions=exceptions, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('elevation') is not None: # noqa: E501 - _query_params.append(('elevation', _params['elevation'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _wms_map_handler_serialize( + self, + workflow, + version, + service, + request, + width, + height, + bbox, + format, + layers, + styles, + crs, + time, + transparent, + bgcolor, + sld, + sld_body, + elevation, + exceptions, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('exceptions') is not None: # noqa: E501 - _query_params.append(('exceptions', _params['exceptions'].value)) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if workflow is not None: + _path_params['workflow'] = workflow + # process the query parameters + if version is not None: + + _query_params.append(('version', version.value)) + + if service is not None: + + _query_params.append(('service', service.value)) + + if request is not None: + + _query_params.append(('request', request.value)) + + if width is not None: + + _query_params.append(('width', width)) + + if height is not None: + + _query_params.append(('height', height)) + + if bbox is not None: + + _query_params.append(('bbox', bbox)) + + if format is not None: + + _query_params.append(('format', format.value)) + + if layers is not None: + + _query_params.append(('layers', layers)) + + if crs is not None: + + _query_params.append(('crs', crs)) + + if styles is not None: + + _query_params.append(('styles', styles)) + + if time is not None: + + _query_params.append(('time', time)) + + if transparent is not None: + + _query_params.append(('transparent', transparent)) + + if bgcolor is not None: + + _query_params.append(('bgcolor', bgcolor)) + + if sld is not None: + + _query_params.append(('sld', sld)) + + if sld_body is not None: + + _query_params.append(('sld_body', sld_body)) + + if elevation is not None: + + _query_params.append(('elevation', elevation)) + + if exceptions is not None: + + _query_params.append(('exceptions', exceptions.value)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['image/png']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'image/png' + ] + ) - # authentication setting - 
_auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "bytearray", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/wms/{workflow}?request=GetMap', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/wms/{workflow}?request=GetMap', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/permissions_api.py b/python/geoengine_openapi_client/api/permissions_api.py index 1ed22602..384bac8a 100644 --- a/python/geoengine_openapi_client/api/permissions_api.py +++ b/python/geoengine_openapi_client/api/permissions_api.py @@ -12,27 +12,20 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint +from pydantic import Field, StrictStr from typing import List - +from typing_extensions import Annotated from geoengine_openapi_client.models.permission_listing import PermissionListing from geoengine_openapi_client.models.permission_request import PermissionRequest -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class PermissionsApi: @@ -47,154 +40,293 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def add_permission_handler(self, permission_request : PermissionRequest, **kwargs) -> None: # noqa: E501 - """Adds a new permission. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def add_permission_handler( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Adds a new permission. - >>> thread = api.add_permission_handler(permission_request, async_req=True) - >>> result = thread.get() :param permission_request: (required) :type permission_request: PermissionRequest - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_permission_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_permission_handler_with_http_info(permission_request, **kwargs) # noqa: E501 - - @validate_arguments - def add_permission_handler_with_http_info(self, permission_request : PermissionRequest, **kwargs) -> ApiResponse: # noqa: E501 - """Adds a new permission. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_permission_handler_with_http_info(permission_request, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_permission_handler_with_http_info( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Adds a new permission. + :param permission_request: (required) :type permission_request: PermissionRequest - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._add_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'permission_request' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_permission_handler_without_preload_content( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Adds a new permission. + + + :param permission_request: (required) + :type permission_request: PermissionRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_permission_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _add_permission_handler_serialize( + self, + permission_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['permission_request'] is not None: - _body_params = _params['permission_request'] + if permission_request is not None: + _body_params = permission_request + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/permissions', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/permissions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def 
get_resource_permissions_handler(self, resource_type : Annotated[StrictStr, Field(..., description="Resource Type")], resource_id : Annotated[StrictStr, Field(..., description="Resource Id")], limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> List[PermissionListing]: # noqa: E501 - """Lists permission for a given resource. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_resource_permissions_handler(resource_type, resource_id, limit, offset, async_req=True) - >>> result = thread.get() + + @validate_call + def get_resource_permissions_handler( + self, + resource_type: Annotated[StrictStr, Field(description="Resource Type")], + resource_id: Annotated[StrictStr, Field(description="Resource Id")], + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[PermissionListing]: + """Lists permission for a given resource. + :param resource_type: Resource Type (required) :type resource_type: str @@ -204,32 +336,75 @@ def get_resource_permissions_handler(self, resource_type : Annotated[StrictStr, :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[PermissionListing] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the get_resource_permissions_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_resource_permissions_handler_with_http_info(resource_type, resource_id, limit, offset, **kwargs) # noqa: E501 - - @validate_arguments - def get_resource_permissions_handler_with_http_info(self, resource_type : Annotated[StrictStr, Field(..., description="Resource Type")], resource_id : Annotated[StrictStr, Field(..., description="Resource Id")], limit : conint(strict=True, ge=0), offset : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Lists permission for a given resource. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_resource_permissions_handler_with_http_info(resource_type, resource_id, limit, offset, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_resource_permissions_handler_serialize( + resource_type=resource_type, + resource_id=resource_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[PermissionListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_resource_permissions_handler_with_http_info( + self, + resource_type: Annotated[StrictStr, Field(description="Resource Type")], + resource_id: Annotated[StrictStr, Field(description="Resource Id")], + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[PermissionListing]]: + """Lists permission for a given resource. + :param resource_type: Resource Type (required) :type resource_type: str @@ -239,250 +414,463 @@ def get_resource_permissions_handler_with_http_info(self, resource_type : Annota :type limit: int :param offset: (required) :type offset: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(List[PermissionListing], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'resource_type', - 'resource_id', - 'limit', - 'offset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._get_resource_permissions_handler_serialize( + resource_type=resource_type, + resource_id=resource_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_resource_permissions_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[PermissionListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['resource_type']: - _path_params['resource_type'] = _params['resource_type'] + @validate_call + def get_resource_permissions_handler_without_preload_content( + self, + resource_type: Annotated[StrictStr, Field(description="Resource Type")], + resource_id: Annotated[StrictStr, Field(description="Resource Id")], + limit: Annotated[int, Field(strict=True, ge=0)], + offset: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Lists permission for a given resource. + + + :param resource_type: Resource Type (required) + :type resource_type: str + :param resource_id: Resource Id (required) + :type resource_id: str + :param limit: (required) + :type limit: int + :param offset: (required) + :type offset: int + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_resource_permissions_handler_serialize( + resource_type=resource_type, + resource_id=resource_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[PermissionListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - if _params['resource_id']: - _path_params['resource_id'] = _params['resource_id'] + def _get_resource_permissions_handler_serialize( + self, + resource_type, + resource_id, + limit, + offset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the query parameters - _query_params = [] - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if resource_type is not None: + _path_params['resource_type'] = resource_type + if resource_id is not None: + _path_params['resource_id'] = resource_id + # process the query parameters + if limit is not None: + + _query_params.append(('limit', limit)) + + if offset is not None: + + _query_params.append(('offset', offset)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[PermissionListing]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/permissions/resources/{resource_type}/{resource_id}', 'GET', - _path_params, - _query_params, - _header_params, + return 
self.api_client.param_serialize( + method='GET', + resource_path='/permissions/resources/{resource_type}/{resource_id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def remove_permission_handler(self, permission_request : PermissionRequest, **kwargs) -> None: # noqa: E501 - """Removes an existing permission. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_permission_handler(permission_request, async_req=True) - >>> result = thread.get() + + @validate_call + def remove_permission_handler( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Removes an existing permission. + :param permission_request: (required) :type permission_request: PermissionRequest - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the remove_permission_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_permission_handler_with_http_info(permission_request, **kwargs) # noqa: E501 - - @validate_arguments - def remove_permission_handler_with_http_info(self, permission_request : PermissionRequest, **kwargs) -> ApiResponse: # noqa: E501 - """Removes an existing permission. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_permission_handler_with_http_info(permission_request, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._remove_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_permission_handler_with_http_info( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Removes an existing permission. + :param permission_request: (required) :type permission_request: PermissionRequest - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. 
- If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._remove_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'permission_request' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def remove_permission_handler_without_preload_content( + self, + permission_request: PermissionRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Removes an existing permission. + + + :param permission_request: (required) + :type permission_request: PermissionRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_permission_handler_serialize( + permission_request=permission_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_permission_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _remove_permission_handler_serialize( + self, + permission_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['permission_request'] is not None: - _body_params = _params['permission_request'] + if permission_request is not None: + _body_params = permission_request + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/permissions', 'DELETE', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='DELETE', + resource_path='/permissions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/plots_api.py b/python/geoengine_openapi_client/api/plots_api.py index 36189e66..cca1138d 100644 --- a/python/geoengine_openapi_client/api/plots_api.py +++ b/python/geoengine_openapi_client/api/plots_api.py @@ -12,26 +12,19 @@ Do not edit the class 
manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr +from pydantic import Field, StrictStr from typing import Optional - +from typing_extensions import Annotated from geoengine_openapi_client.models.wrapped_plot_output import WrappedPlotOutput -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class PlotsApi: @@ -46,16 +39,31 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def get_plot_handler(self, bbox : StrictStr, time : StrictStr, spatial_resolution : StrictStr, id : Annotated[StrictStr, Field(..., description="Workflow id")], crs : Optional[StrictStr] = None, **kwargs) -> WrappedPlotOutput: # noqa: E501 - """Generates a plot. # noqa: E501 - # Example 1. Upload the file `plain_data.csv` with the following content: ```csv a 1 2 ``` 2. Create a dataset from it using the \"Plain Data\" example at `/dataset`. 3. Create a statistics workflow using the \"Statistics Plot\" example at `/workflow`. 4. Generate the plot with this handler. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def get_plot_handler( + self, + bbox: StrictStr, + time: StrictStr, + spatial_resolution: StrictStr, + id: Annotated[StrictStr, Field(description="Workflow id")], + crs: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> WrappedPlotOutput: + """Generates a plot. - >>> thread = api.get_plot_handler(bbox, time, spatial_resolution, id, crs, async_req=True) - >>> result = thread.get() + # Example 1. Upload the file `plain_data.csv` with the following content: ```csv a 1 2 ``` 2. Create a dataset from it using the \"Plain Data\" example at `/dataset`. 3. Create a statistics workflow using the \"Statistics Plot\" example at `/workflow`. 4. Generate the plot with this handler. :param bbox: (required) :type bbox: str @@ -67,33 +75,78 @@ def get_plot_handler(self, bbox : StrictStr, time : StrictStr, spatial_resolutio :type id: str :param crs: :type crs: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: WrappedPlotOutput - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_plot_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_plot_handler_with_http_info(bbox, time, spatial_resolution, id, crs, **kwargs) # noqa: E501 - - @validate_arguments - def get_plot_handler_with_http_info(self, bbox : StrictStr, time : StrictStr, spatial_resolution : StrictStr, id : Annotated[StrictStr, Field(..., description="Workflow id")], crs : Optional[StrictStr] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Generates a plot. # noqa: E501 - - # Example 1. Upload the file `plain_data.csv` with the following content: ```csv a 1 2 ``` 2. Create a dataset from it using the \"Plain Data\" example at `/dataset`. 3. Create a statistics workflow using the \"Statistics Plot\" example at `/workflow`. 4. Generate the plot with this handler. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_plot_handler_with_http_info(bbox, time, spatial_resolution, id, crs, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_plot_handler_serialize( + bbox=bbox, + time=time, + spatial_resolution=spatial_resolution, + id=id, + crs=crs, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "WrappedPlotOutput", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_plot_handler_with_http_info( + self, + bbox: StrictStr, + time: StrictStr, + spatial_resolution: StrictStr, + id: Annotated[StrictStr, Field(description="Workflow id")], + crs: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[WrappedPlotOutput]: + """Generates a plot. + + # Example 1. Upload the file `plain_data.csv` with the following content: ```csv a 1 2 ``` 2. 
Create a dataset from it using the \"Plain Data\" example at `/dataset`. 3. Create a statistics workflow using the \"Statistics Plot\" example at `/workflow`. 4. Generate the plot with this handler. :param bbox: (required) :type bbox: str @@ -105,115 +158,212 @@ def get_plot_handler_with_http_info(self, bbox : StrictStr, time : StrictStr, sp :type id: str :param crs: :type crs: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(WrappedPlotOutput, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'bbox', - 'time', - 'spatial_resolution', - 'id', - 'crs' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._get_plot_handler_serialize( + bbox=bbox, + time=time, + spatial_resolution=spatial_resolution, + id=id, + crs=crs, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_plot_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "WrappedPlotOutput", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + @validate_call + def get_plot_handler_without_preload_content( + self, + bbox: StrictStr, + time: StrictStr, + spatial_resolution: StrictStr, + id: Annotated[StrictStr, Field(description="Workflow id")], + crs: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Generates a plot. + # Example 1. Upload the file `plain_data.csv` with the following content: ```csv a 1 2 ``` 2. Create a dataset from it using the \"Plain Data\" example at `/dataset`. 3. Create a statistics workflow using the \"Statistics Plot\" example at `/workflow`. 4. Generate the plot with this handler. - # process the query parameters - _query_params = [] - if _params.get('bbox') is not None: # noqa: E501 - _query_params.append(('bbox', _params['bbox'])) + :param bbox: (required) + :type bbox: str + :param time: (required) + :type time: str + :param spatial_resolution: (required) + :type spatial_resolution: str + :param id: Workflow id (required) + :type id: str + :param crs: + :type crs: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_plot_handler_serialize( + bbox=bbox, + time=time, + spatial_resolution=spatial_resolution, + id=id, + crs=crs, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('crs') is not None: # noqa: E501 - _query_params.append(('crs', _params['crs'])) + _response_types_map: Dict[str, Optional[str]] = { + '200': "WrappedPlotOutput", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - if _params.get('time') is not None: # noqa: E501 - _query_params.append(('time', _params['time'])) - if _params.get('spatial_resolution') is not None: # noqa: E501 - _query_params.append(('spatialResolution', _params['spatial_resolution'])) + def _get_plot_handler_serialize( + self, + bbox, + time, + spatial_resolution, + id, + crs, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + if bbox is not None: + + _query_params.append(('bbox', bbox)) + + if crs is not None: + + _query_params.append(('crs', crs)) + + if time is not None: + + _query_params.append(('time', time)) + + if spatial_resolution is not None: + + _query_params.append(('spatialResolution', spatial_resolution)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "WrappedPlotOutput", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/plot/{id}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/plot/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git 
a/python/geoengine_openapi_client/api/projects_api.py b/python/geoengine_openapi_client/api/projects_api.py index 99ed0a52..cca9c9eb 100644 --- a/python/geoengine_openapi_client/api/projects_api.py +++ b/python/geoengine_openapi_client/api/projects_api.py @@ -12,32 +12,25 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint +from pydantic import Field, StrictStr from typing import List - -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response +from typing_extensions import Annotated from geoengine_openapi_client.models.create_project import CreateProject +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.order_by import OrderBy from geoengine_openapi_client.models.project import Project from geoengine_openapi_client.models.project_listing import ProjectListing from geoengine_openapi_client.models.project_version import ProjectVersion from geoengine_openapi_client.models.update_project import UpdateProject -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class ProjectsApi: @@ -52,292 +45,550 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def create_project_handler(self, create_project : CreateProject, **kwargs) -> AddCollection200Response: # noqa: E501 - """Create a new project for the user. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def create_project_handler( + self, + create_project: CreateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Create a new project for the user. - >>> thread = api.create_project_handler(create_project, async_req=True) - >>> result = thread.get() :param create_project: (required) :type create_project: CreateProject - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the create_project_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.create_project_handler_with_http_info(create_project, **kwargs) # noqa: E501 - - @validate_arguments - def create_project_handler_with_http_info(self, create_project : CreateProject, **kwargs) -> ApiResponse: # noqa: E501 - """Create a new project for the user. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_project_handler_with_http_info(create_project, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._create_project_handler_serialize( + create_project=create_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_project_handler_with_http_info( + self, + create_project: CreateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Create a new project for the user. + :param create_project: (required) :type create_project: CreateProject - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._create_project_handler_serialize( + create_project=create_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'create_project' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def create_project_handler_without_preload_content( + self, + create_project: CreateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new project for the user. + + + :param create_project: (required) + :type create_project: CreateProject + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_project_handler_serialize( + create_project=create_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method create_project_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _create_project_handler_serialize( + self, + create_project, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['create_project'] is not None: - _body_params = _params['create_project'] + if create_project is not None: + _body_params = create_project + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "AddCollection200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/project', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/project', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def 
delete_project_handler(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> None: # noqa: E501 - """Deletes a project. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_project_handler(project, async_req=True) - >>> result = thread.get() + + @validate_call + def delete_project_handler( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Deletes a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the delete_project_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.delete_project_handler_with_http_info(project, **kwargs) # noqa: E501 - - @validate_arguments - def delete_project_handler_with_http_info(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> ApiResponse: # noqa: E501 - """Deletes a project. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_project_handler_with_http_info(project, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._delete_project_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_project_handler_with_http_info( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Deletes a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._delete_project_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'project' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def delete_project_handler_without_preload_content( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Deletes a project. + + + :param project: Project id (required) + :type project: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
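For reference, a minimal usage sketch of the regenerated client (not part of the diff itself): it assumes the generated class is named ProjectsApi per openapi-generator convention and that host and session token are placeholders; only the delete_project_handler signature shown above is taken from the generated code.

from geoengine_openapi_client.api_client import ApiClient
from geoengine_openapi_client.configuration import Configuration
from geoengine_openapi_client.api.projects_api import ProjectsApi  # assumed class name

# Host is a placeholder; configure the 'session_token' auth scheme as documented in configuration.py.
configuration = Configuration(host="http://localhost:3030/api")
api_client = ApiClient(configuration)
projects_api = ProjectsApi(api_client)

# Synchronous call; the async_req flag from the old generator no longer exists.
projects_api.delete_project_handler(project="c8f88f12-0000-0000-0000-000000000000")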
+ """ # noqa: E501 + + _param = self._delete_project_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_project_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['project']: - _path_params['project'] = _params['project'] + def _delete_project_handler_serialize( + self, + project, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if project is not None: + _path_params['project'] = project # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/project/{project}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/project/{project}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def list_projects_handler(self, order : OrderBy, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> List[ProjectListing]: # noqa: E501 - """List all projects accessible to the user that match the selected criteria. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_projects_handler(order, offset, limit, async_req=True) - >>> result = thread.get() + + @validate_call + def list_projects_handler( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[ProjectListing]: + """List all projects accessible to the user that match the selected criteria. + :param order: (required) :type order: OrderBy @@ -345,32 +596,73 @@ def list_projects_handler(self, order : OrderBy, offset : conint(strict=True, ge :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[ProjectListing] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_projects_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_projects_handler_with_http_info(order, offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def list_projects_handler_with_http_info(self, order : OrderBy, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """List all projects accessible to the user that match the selected criteria. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_projects_handler_with_http_info(order, offset, limit, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._list_projects_handler_serialize( + order=order, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_projects_handler_with_http_info( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[ProjectListing]]: + """List all projects accessible to the user that match the selected criteria. + :param order: (required) :type order: OrderBy @@ -378,678 +670,1255 @@ def list_projects_handler_with_http_info(self, order : OrderBy, offset : conint( :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[ProjectListing], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._list_projects_handler_serialize( + order=order, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'order', - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def list_projects_handler_without_preload_content( + self, + order: OrderBy, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List all projects accessible to the user that match the selected criteria. + + + :param order: (required) + :type order: OrderBy + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
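A hedged sketch of listing projects with the new signature; the OrderBy members are not shown in this hunk, so the example picks the first enum member instead of assuming a name (projects_api is the ProjectsApi instance from the sketch above).

from geoengine_openapi_client.models.order_by import OrderBy

order = list(OrderBy)[0]  # any OrderBy member; concrete names live in models/order_by.py
listings = projects_api.list_projects_handler(order=order, offset=0, limit=10)
for listing in listings:  # each item is a deserialized ProjectListing model
    print(listing)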
+ """ # noqa: E501 + + _param = self._list_projects_handler_serialize( + order=order, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_projects_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectListing]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['order']: - _path_params['order'] = _params['order'] + def _list_projects_handler_serialize( + self, + order, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['offset']: - _path_params['offset'] = _params['offset'] + _host = None - if _params['limit']: - _path_params['limit'] = _params['limit'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if order is not None: + _path_params['order'] = order.value + if offset is not None: + _path_params['offset'] = offset + if limit is not None: + _path_params['limit'] = limit # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[ProjectListing]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/projects', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/projects', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def load_project_latest_handler(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> Project: # noqa: E501 - """Retrieves details about the latest version of a project. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.load_project_latest_handler(project, async_req=True) - >>> result = thread.get() + + @validate_call + def load_project_latest_handler( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Project: + """Retrieves details about the latest version of a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Project - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the load_project_latest_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.load_project_latest_handler_with_http_info(project, **kwargs) # noqa: E501 - - @validate_arguments - def load_project_latest_handler_with_http_info(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves details about the latest version of a project. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.load_project_latest_handler_with_http_info(project, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._load_project_latest_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def load_project_latest_handler_with_http_info( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Project]: + """Retrieves details about the latest version of a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._load_project_latest_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'project' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def load_project_latest_handler_without_preload_content( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves details about the latest version of a project. + + + :param project: Project id (required) + :type project: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
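A sketch of the *_with_http_info variant, which now returns a typed ApiResponse object instead of a tuple; the attribute names assume the generated api_response.py (projects_api as above).

response = projects_api.load_project_latest_handler_with_http_info(
    project="c8f88f12-0000-0000-0000-000000000000"
)
print(response.status_code)  # HTTP status code
print(response.headers)      # response headers
project = response.data      # deserialized Project model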
+ """ # noqa: E501 + + _param = self._load_project_latest_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method load_project_latest_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['project']: - _path_params['project'] = _params['project'] + def _load_project_latest_handler_serialize( + self, + project, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project is not None: + _path_params['project'] = project # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "Project", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/project/{project}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/project/{project}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def load_project_version_handler(self, project : Annotated[StrictStr, Field(..., description="Project id")], version : Annotated[StrictStr, Field(..., description="Version id")], **kwargs) -> Project: # noqa: E501 - """Retrieves details about the given version of a project. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def load_project_version_handler( + self, + project: Annotated[StrictStr, Field(description="Project id")], + version: Annotated[StrictStr, Field(description="Version id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Project: + """Retrieves details about the given version of a project. - >>> thread = api.load_project_version_handler(project, version, async_req=True) - >>> result = thread.get() :param project: Project id (required) :type project: str :param version: Version id (required) :type version: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Project - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the load_project_version_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.load_project_version_handler_with_http_info(project, version, **kwargs) # noqa: E501 - - @validate_arguments - def load_project_version_handler_with_http_info(self, project : Annotated[StrictStr, Field(..., description="Project id")], version : Annotated[StrictStr, Field(..., description="Version id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves details about the given version of a project. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.load_project_version_handler_with_http_info(project, version, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._load_project_version_handler_serialize( + project=project, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def load_project_version_handler_with_http_info( + self, + project: Annotated[StrictStr, Field(description="Project id")], + version: Annotated[StrictStr, Field(description="Version id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Project]: + """Retrieves details about the given version of a project. + :param project: Project id (required) :type project: str :param version: Version id (required) :type version: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._load_project_version_handler_serialize( + project=project, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'project', - 'version' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def load_project_version_handler_without_preload_content( + self, + project: Annotated[StrictStr, Field(description="Project id")], + version: Annotated[StrictStr, Field(description="Version id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves details about the given version of a project. + + + :param project: Project id (required) + :type project: str + :param version: Version id (required) + :type version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
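A sketch of the new *_without_preload_content variant, which skips deserialization and hands back the raw HTTP response; the attributes below assume a urllib3-style response object, so the body has to be read manually.

raw = projects_api.load_project_version_handler_without_preload_content(
    project="c8f88f12-0000-0000-0000-000000000000",
    version="9d9e34b7-0000-0000-0000-000000000000",
)
print(raw.status)   # assumed urllib3-style status attribute
body = raw.read()   # undecoded JSON bytes of the Project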
+ """ # noqa: E501 + + _param = self._load_project_version_handler_serialize( + project=project, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method load_project_version_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Project", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['project']: - _path_params['project'] = _params['project'] + def _load_project_version_handler_serialize( + self, + project, + version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['version']: - _path_params['version'] = _params['version'] + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if project is not None: + _path_params['project'] = project + if version is not None: + _path_params['version'] = version # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "Project", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/project/{project}/{version}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/project/{project}/{version}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def project_versions_handler(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> List[ProjectVersion]: # noqa: E501 - """Lists all available versions of a project. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.project_versions_handler(project, async_req=True) - >>> result = thread.get() + + @validate_call + def project_versions_handler( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[ProjectVersion]: + """Lists all available versions of a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[ProjectVersion] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the project_versions_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.project_versions_handler_with_http_info(project, **kwargs) # noqa: E501 - - @validate_arguments - def project_versions_handler_with_http_info(self, project : Annotated[StrictStr, Field(..., description="Project id")], **kwargs) -> ApiResponse: # noqa: E501 - """Lists all available versions of a project. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.project_versions_handler_with_http_info(project, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._project_versions_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectVersion]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def project_versions_handler_with_http_info( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[ProjectVersion]]: + """Lists all available versions of a project. + :param project: Project id (required) :type project: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[ProjectVersion], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._project_versions_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectVersion]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'project' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def project_versions_handler_without_preload_content( + self, + project: Annotated[StrictStr, Field(description="Project id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Lists all available versions of a project. + + + :param project: Project id (required) + :type project: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
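A short sketch of the per-request timeout documented above: a single number is the total request timeout, while a pair is interpreted as (connection, read) timeouts.

versions = projects_api.project_versions_handler(
    project="c8f88f12-0000-0000-0000-000000000000",
    _request_timeout=(3.0, 30.0),  # (connection, read) timeouts in seconds
)
print(len(versions))               # list of ProjectVersion models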
+ """ # noqa: E501 + + _param = self._project_versions_handler_serialize( + project=project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method project_versions_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProjectVersion]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['project']: - _path_params['project'] = _params['project'] + def _project_versions_handler_serialize( + self, + project, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if project is not None: + _path_params['project'] = project # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[ProjectVersion]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/project/{project}/versions', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/project/{project}/versions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def update_project_handler(self, project : Annotated[StrictStr, Field(..., description="Project id")], update_project : UpdateProject, **kwargs) -> None: # noqa: E501 - """Updates a project. # noqa: E501 - This will create a new version. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_handler(project, update_project, async_req=True) - >>> result = thread.get() + @validate_call + def update_project_handler( + self, + project: Annotated[StrictStr, Field(description="Project id")], + update_project: UpdateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Updates a project. This will create a new version. + :param project: Project id (required) :type project: str :param update_project: (required) :type update_project: UpdateProject - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_project_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_project_handler_with_http_info(project, update_project, **kwargs) # noqa: E501 - - @validate_arguments - def update_project_handler_with_http_info(self, project : Annotated[StrictStr, Field(..., description="Project id")], update_project : UpdateProject, **kwargs) -> ApiResponse: # noqa: E501 - """Updates a project. # noqa: E501 - - This will create a new version. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_project_handler_with_http_info(project, update_project, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_project_handler_serialize( + project=project, + update_project=update_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_project_handler_with_http_info( + self, + project: Annotated[StrictStr, Field(description="Project id")], + update_project: UpdateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Updates a project. This will create a new version. + :param project: Project id (required) :type project: str :param update_project: (required) :type update_project: UpdateProject - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_project_handler_serialize( + project=project, + update_project=update_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'project', - 'update_project' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def update_project_handler_without_preload_content( + self, + project: Annotated[StrictStr, Field(description="Project id")], + update_project: UpdateProject, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Updates a project. This will create a new version. + + + :param project: Project id (required) + :type project: str + :param update_project: (required) + :type update_project: UpdateProject + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_project_handler_serialize( + project=project, + update_project=update_project, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_project_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['project']: - _path_params['project'] = _params['project'] + def _update_project_handler_serialize( + self, + project, + update_project, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project is not None: + _path_params['project'] = project # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['update_project'] is not None: - _body_params = _params['update_project'] + if update_project is not None: + _body_params = update_project + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/project/{project}', 'PATCH', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PATCH', + resource_path='/project/{project}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/session_api.py b/python/geoengine_openapi_client/api/session_api.py index f594533d..a030a368 100644 --- 
a/python/geoengine_openapi_client/api/session_api.py +++ b/python/geoengine_openapi_client/api/session_api.py @@ -12,25 +12,21 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated -from pydantic import validate_arguments, ValidationError - +from pydantic import StrictStr from geoengine_openapi_client.models.auth_code_request_url import AuthCodeRequestURL from geoengine_openapi_client.models.auth_code_response import AuthCodeResponse from geoengine_openapi_client.models.user_credentials import UserCredentials from geoengine_openapi_client.models.user_registration import UserRegistration from geoengine_openapi_client.models.user_session import UserSession -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class SessionApi: @@ -45,955 +41,1781 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def anonymous_handler(self, **kwargs) -> UserSession: # noqa: E501 - """Creates session for anonymous user. The session's id serves as a Bearer token for requests. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def anonymous_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserSession: + """Creates session for anonymous user. The session's id serves as a Bearer token for requests. - >>> thread = api.anonymous_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UserSession - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the anonymous_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.anonymous_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def anonymous_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Creates session for anonymous user. The session's id serves as a Bearer token for requests. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.anonymous_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._anonymous_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def anonymous_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserSession]: + """Creates session for anonymous user. The session's id serves as a Bearer token for requests. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(UserSession, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._anonymous_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def anonymous_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Creates session for anonymous user. The session's id serves as a Bearer token for requests. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._anonymous_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method anonymous_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _anonymous_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = [] # noqa: E501 - _response_types_map = { - '200': "UserSession", - } + # authentication setting + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/anonymous', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/anonymous', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def login_handler(self, user_credentials : UserCredentials, **kwargs) -> UserSession: # noqa: E501 - """Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.login_handler(user_credentials, async_req=True) - >>> result = thread.get() + @validate_call + def login_handler( + self, + user_credentials: UserCredentials, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserSession: + """Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. + :param user_credentials: (required) :type user_credentials: UserCredentials - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UserSession - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the login_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.login_handler_with_http_info(user_credentials, **kwargs) # noqa: E501 - - @validate_arguments - def login_handler_with_http_info(self, user_credentials : UserCredentials, **kwargs) -> ApiResponse: # noqa: E501 - """Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.login_handler_with_http_info(user_credentials, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._login_handler_serialize( + user_credentials=user_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def login_handler_with_http_info( + self, + user_credentials: UserCredentials, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserSession]: + """Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. + :param user_credentials: (required) :type user_credentials: UserCredentials - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(UserSession, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._login_handler_serialize( + user_credentials=user_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'user_credentials' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def login_handler_without_preload_content( + self, + user_credentials: UserCredentials, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. + + + :param user_credentials: (required) + :type user_credentials: UserCredentials + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_handler_serialize( + user_credentials=user_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method login_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _login_handler_serialize( + self, + user_credentials, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['user_credentials'] is not None: - _body_params = _params['user_credentials'] + if user_credentials is not None: + _body_params = user_credentials + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = [] # noqa: E501 - - _response_types_map = { - '200': "UserSession", - } + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/login', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/login', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def logout_handler(self, **kwargs) -> None: # noqa: E501 - """Ends a 
session. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def logout_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Ends a session. - >>> thread = api.logout_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the logout_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.logout_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def logout_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Ends a session. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.logout_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._logout_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def logout_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Ends a session. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 - _params = locals() + _param = self._logout_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def logout_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Ends a session. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._logout_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method logout_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _logout_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/logout', 'POST', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/logout', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def oidc_init(self, **kwargs) -> AuthCodeRequestURL: # noqa: E501 - """Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. # noqa: E501 - # Errors This call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.oidc_init(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
+ @validate_call + def oidc_init( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthCodeRequestURL: + """Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. + + # Errors This call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AuthCodeRequestURL - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the oidc_init_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.oidc_init_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def oidc_init_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. # noqa: E501 - - # Errors This call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.oidc_init_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._oidc_init_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthCodeRequestURL", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def oidc_init_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthCodeRequestURL]: + """Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. + + # Errors This call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable. + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AuthCodeRequestURL, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._oidc_init_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthCodeRequestURL", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def oidc_init_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. + + # Errors This call fails if Open ID Connect is disabled, misconfigured or the Id Provider is unreachable. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._oidc_init_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method oidc_init" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthCodeRequestURL", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _oidc_init_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = [] # noqa: E501 - _response_types_map = { - '200': "AuthCodeRequestURL", - } + # authentication setting + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/oidcInit', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/oidcInit', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def oidc_login(self, auth_code_response : AuthCodeResponse, **kwargs) -> UserSession: # noqa: E501 - """Creates a session for a user via a login with Open Id Connect. # noqa: E501 - This call must be preceded by a call to oidcInit and match the parameters of that call. # Errors This call fails if the [`AuthCodeResponse`] is invalid, if a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved, if the Open Id Connect configuration is invalid, or if the Id Provider is unreachable. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.oidc_login(auth_code_response, async_req=True) - >>> result = thread.get() + @validate_call + def oidc_login( + self, + auth_code_response: AuthCodeResponse, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserSession: + """Creates a session for a user via a login with Open Id Connect. This call must be preceded by a call to oidcInit and match the parameters of that call. + + # Errors This call fails if the [`AuthCodeResponse`] is invalid, if a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved, if the Open Id Connect configuration is invalid, or if the Id Provider is unreachable. :param auth_code_response: (required) :type auth_code_response: AuthCodeResponse - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UserSession - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the oidc_login_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.oidc_login_with_http_info(auth_code_response, **kwargs) # noqa: E501 - - @validate_arguments - def oidc_login_with_http_info(self, auth_code_response : AuthCodeResponse, **kwargs) -> ApiResponse: # noqa: E501 - """Creates a session for a user via a login with Open Id Connect. # noqa: E501 - - This call must be preceded by a call to oidcInit and match the parameters of that call. # Errors This call fails if the [`AuthCodeResponse`] is invalid, if a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved, if the Open Id Connect configuration is invalid, or if the Id Provider is unreachable. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.oidc_login_with_http_info(auth_code_response, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._oidc_login_serialize( + auth_code_response=auth_code_response, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def oidc_login_with_http_info( + self, + auth_code_response: AuthCodeResponse, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserSession]: + """Creates a session for a user via a login with Open Id Connect. This call must be preceded by a call to oidcInit and match the parameters of that call. + + # Errors This call fails if the [`AuthCodeResponse`] is invalid, if a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved, if the Open Id Connect configuration is invalid, or if the Id Provider is unreachable. :param auth_code_response: (required) :type auth_code_response: AuthCodeResponse - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(UserSession, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._oidc_login_serialize( + auth_code_response=auth_code_response, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'auth_code_response' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def oidc_login_without_preload_content( + self, + auth_code_response: AuthCodeResponse, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Creates a session for a user via a login with Open Id Connect. This call must be preceded by a call to oidcInit and match the parameters of that call. + + # Errors This call fails if the [`AuthCodeResponse`] is invalid, if a previous oidcLogin call with the same state was already successfully or unsuccessfully resolved, if the Open Id Connect configuration is invalid, or if the Id Provider is unreachable. + + :param auth_code_response: (required) + :type auth_code_response: AuthCodeResponse + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._oidc_login_serialize( + auth_code_response=auth_code_response, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method oidc_login" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _oidc_login_serialize( + self, + auth_code_response, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['auth_code_response'] is not None: - _body_params = _params['auth_code_response'] + if auth_code_response is not None: + _body_params = auth_code_response + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = [] # noqa: E501 - - _response_types_map = { - '200': "UserSession", - } + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/oidcLogin', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/oidcLogin', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def register_user_handler(self, user_registration : 
UserRegistration, **kwargs) -> str: # noqa: E501 - """Registers a user. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.register_user_handler(user_registration, async_req=True) - >>> result = thread.get() + @validate_call + def register_user_handler( + self, + user_registration: UserRegistration, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Registers a user. + :param user_registration: (required) :type user_registration: UserRegistration - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the register_user_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.register_user_handler_with_http_info(user_registration, **kwargs) # noqa: E501 - - @validate_arguments - def register_user_handler_with_http_info(self, user_registration : UserRegistration, **kwargs) -> ApiResponse: # noqa: E501 - """Registers a user. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.register_user_handler_with_http_info(user_registration, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._register_user_handler_serialize( + user_registration=user_registration, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def register_user_handler_with_http_info( + self, + user_registration: UserRegistration, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Registers a user. + :param user_registration: (required) :type user_registration: UserRegistration - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._register_user_handler_serialize( + user_registration=user_registration, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'user_registration' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def register_user_handler_without_preload_content( + self, + user_registration: UserRegistration, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Registers a user. + + + :param user_registration: (required) + :type user_registration: UserRegistration + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._register_user_handler_serialize( + user_registration=user_registration, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method register_user_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _register_user_handler_serialize( + self, + user_registration, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['user_registration'] is not None: - _body_params = _params['user_registration'] + if user_registration is not None: + _body_params = user_registration + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = [] # noqa: E501 - - _response_types_map = { - '200': "str", - } + _auth_settings: List[str] = [ + ] - return self.api_client.call_api( - '/user', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/user', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def session_handler(self, **kwargs) -> UserSession: # 
noqa: E501 - """Retrieves details about the current session. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def session_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserSession: + """Retrieves details about the current session. - >>> thread = api.session_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UserSession - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the session_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.session_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def session_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves details about the current session. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.session_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + """ # noqa: E501 + + _param = self._session_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def session_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserSession]: + """Retrieves details about the current session. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(UserSession, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._session_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def session_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves details about the current session. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._session_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method session_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserSession", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _session_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "UserSession", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/session', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/session', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/spatial_references_api.py b/python/geoengine_openapi_client/api/spatial_references_api.py index c7fbff38..ca4a0284 100644 --- a/python/geoengine_openapi_client/api/spatial_references_api.py +++ b/python/geoengine_openapi_client/api/spatial_references_api.py @@ -12,23 +12,17 @@ Do not edit the class manually. 
""" # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated from pydantic import StrictStr - from geoengine_openapi_client.models.spatial_reference_specification import SpatialReferenceSpecification -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class SpatialReferencesApi: @@ -43,140 +37,260 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def get_spatial_reference_specification_handler(self, srs_string : StrictStr, **kwargs) -> SpatialReferenceSpecification: # noqa: E501 - """get_spatial_reference_specification_handler # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def get_spatial_reference_specification_handler( + self, + srs_string: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SpatialReferenceSpecification: + """get_spatial_reference_specification_handler - >>> thread = api.get_spatial_reference_specification_handler(srs_string, async_req=True) - >>> result = thread.get() :param srs_string: (required) :type srs_string: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: SpatialReferenceSpecification - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! 
Please call the get_spatial_reference_specification_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_spatial_reference_specification_handler_with_http_info(srs_string, **kwargs) # noqa: E501 - - @validate_arguments - def get_spatial_reference_specification_handler_with_http_info(self, srs_string : StrictStr, **kwargs) -> ApiResponse: # noqa: E501 - """get_spatial_reference_specification_handler # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_spatial_reference_specification_handler_with_http_info(srs_string, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_spatial_reference_specification_handler_serialize( + srs_string=srs_string, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SpatialReferenceSpecification", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_spatial_reference_specification_handler_with_http_info( + self, + srs_string: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SpatialReferenceSpecification]: + """get_spatial_reference_specification_handler + :param srs_string: (required) :type srs_string: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(SpatialReferenceSpecification, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._get_spatial_reference_specification_handler_serialize( + srs_string=srs_string, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SpatialReferenceSpecification", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'srs_string' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def get_spatial_reference_specification_handler_without_preload_content( + self, + srs_string: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_spatial_reference_specification_handler + + + :param srs_string: (required) + :type srs_string: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_spatial_reference_specification_handler_serialize( + srs_string=srs_string, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_spatial_reference_specification_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "SpatialReferenceSpecification", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['srs_string']: - _path_params['srsString'] = _params['srs_string'] + def _get_spatial_reference_specification_handler_serialize( + self, + srs_string, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if srs_string is not None: + _path_params['srsString'] = srs_string # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "SpatialReferenceSpecification", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/spatialReferenceSpecification/{srsString}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/spatialReferenceSpecification/{srsString}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/tasks_api.py b/python/geoengine_openapi_client/api/tasks_api.py index 0bef6cd6..31256665 100644 --- a/python/geoengine_openapi_client/api/tasks_api.py +++ b/python/geoengine_openapi_client/api/tasks_api.py @@ -12,27 +12,21 @@ Do not edit the class manually. 
""" # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictBool, StrictStr, conint - -from typing import Any, List, Optional +from pydantic import Field, StrictBool, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from geoengine_openapi_client.models.task_filter import TaskFilter from geoengine_openapi_client.models.task_status import TaskStatus from geoengine_openapi_client.models.task_status_with_id import TaskStatusWithId -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class TasksApi: @@ -47,157 +41,299 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def abort_handler(self, id : Annotated[StrictStr, Field(..., description="Task id")], force : Optional[StrictBool] = None, **kwargs) -> None: # noqa: E501 - """Abort a running task. # noqa: E501 - # Parameters * `force` - If true, the task will be aborted without clean-up. You can abort a task that is already in the process of aborting. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def abort_handler( + self, + id: Annotated[StrictStr, Field(description="Task id")], + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Abort a running task. - >>> thread = api.abort_handler(id, force, async_req=True) - >>> result = thread.get() + # Parameters * `force` - If true, the task will be aborted without clean-up. You can abort a task that is already in the process of aborting. :param id: Task id (required) :type id: str :param force: :type force: bool - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the abort_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.abort_handler_with_http_info(id, force, **kwargs) # noqa: E501 - - @validate_arguments - def abort_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Task id")], force : Optional[StrictBool] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Abort a running task. # noqa: E501 - - # Parameters * `force` - If true, the task will be aborted without clean-up. You can abort a task that is already in the process of aborting. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.abort_handler_with_http_info(id, force, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._abort_handler_serialize( + id=id, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '202': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def abort_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Task id")], + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Abort a running task. + + # Parameters * `force` - If true, the task will be aborted without clean-up. You can abort a task that is already in the process of aborting. :param id: Task id (required) :type id: str :param force: :type force: bool - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._abort_handler_serialize( + id=id, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '202': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'id', - 'force' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def abort_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Task id")], + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Abort a running task. + + # Parameters * `force` - If true, the task will be aborted without clean-up. You can abort a task that is already in the process of aborting. + + :param id: Task id (required) + :type id: str + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._abort_handler_serialize( + id=id, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method abort_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '202': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _abort_handler_serialize( + self, + id, + force, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None - # process the query parameters - _query_params = [] - if _params.get('force') is not None: # noqa: E501 - _query_params.append(('force', _params['force'])) + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + if force is not None: + + _query_params.append(('force', force)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/tasks/{id}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/tasks/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def list_handler(self, filter : Optional[Any], offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> List[TaskStatusWithId]: # noqa: E501 - """Retrieve the status of all tasks. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_handler(filter, offset, limit, async_req=True) - >>> result = thread.get() + @validate_call + def list_handler( + self, + filter: Optional[TaskFilter], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TaskStatusWithId]: + """Retrieve the status of all tasks. + :param filter: (required) :type filter: TaskFilter @@ -205,32 +341,73 @@ def list_handler(self, filter : Optional[Any], offset : conint(strict=True, ge=0 :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[TaskStatusWithId] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_handler_with_http_info(filter, offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def list_handler_with_http_info(self, filter : Optional[Any], offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Retrieve the status of all tasks. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_handler_with_http_info(filter, offset, limit, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._list_handler_serialize( + filter=filter, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[TaskStatusWithId]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_handler_with_http_info( + self, + filter: Optional[TaskFilter], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[TaskStatusWithId]]: + """Retrieve the status of all tasks. + :param filter: (required) :type filter: TaskFilter @@ -238,245 +415,445 @@ def list_handler_with_http_info(self, filter : Optional[Any], offset : conint(st :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[TaskStatusWithId], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._list_handler_serialize( + filter=filter, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[TaskStatusWithId]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'filter', - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def list_handler_without_preload_content( + self, + filter: Optional[TaskFilter], + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieve the status of all tasks. + + + :param filter: (required) + :type filter: TaskFilter + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_handler_serialize( + filter=filter, + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[TaskStatusWithId]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['filter']: - _path_params['filter'] = _params['filter'] + def _list_handler_serialize( + self, + filter, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['offset']: - _path_params['offset'] = _params['offset'] + _host = None - if _params['limit']: - _path_params['limit'] = _params['limit'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if filter is not None: + _path_params['filter'] = filter.value + if offset is not None: + _path_params['offset'] = offset + if limit is not None: + _path_params['limit'] = limit # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[TaskStatusWithId]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/tasks/list', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/tasks/list', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def status_handler(self, id : Annotated[StrictStr, Field(..., description="Task id")], **kwargs) -> TaskStatus: # noqa: E501 - """Retrieve the status of a task. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.status_handler(id, async_req=True) - >>> result = thread.get() + @validate_call + def status_handler( + self, + id: Annotated[StrictStr, Field(description="Task id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TaskStatus: + """Retrieve the status of a task. + :param id: Task id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: TaskStatus - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the status_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.status_handler_with_http_info(id, **kwargs) # noqa: E501 - - @validate_arguments - def status_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Task id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieve the status of a task. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.status_handler_with_http_info(id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._status_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskStatus", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def status_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Task id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[TaskStatus]: + """Retrieve the status of a task. + :param id: Task id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(TaskStatus, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._status_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskStatus", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def status_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Task id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieve the status of a task. + + + :param id: Task id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._status_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method status_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskStatus", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _status_handler_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "TaskStatus", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/tasks/{id}/status', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/tasks/{id}/status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/uploads_api.py b/python/geoengine_openapi_client/api/uploads_api.py index 58a5720a..b330c64f 100644 --- a/python/geoengine_openapi_client/api/uploads_api.py +++ b/python/geoengine_openapi_client/api/uploads_api.py @@ -12,28 +12,21 @@ Do not edit the class manually. 
""" # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictBytes, StrictStr, conlist -from typing import Union - -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response +from pydantic import Field, StrictBytes, StrictStr +from typing import List, Tuple, Union +from typing_extensions import Annotated +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.upload_file_layers_response import UploadFileLayersResponse from geoengine_openapi_client.models.upload_files_response import UploadFilesResponse -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class UploadsApi: @@ -48,432 +41,805 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def list_upload_file_layers_handler(self, upload_id : Annotated[StrictStr, Field(..., description="Upload id")], file_name : Annotated[StrictStr, Field(..., description="File name")], **kwargs) -> UploadFileLayersResponse: # noqa: E501 - """List the layers of on uploaded file. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def list_upload_file_layers_handler( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UploadFileLayersResponse: + """List the layers of on uploaded file. - >>> thread = api.list_upload_file_layers_handler(upload_id, file_name, async_req=True) - >>> result = thread.get() :param upload_id: Upload id (required) :type upload_id: str :param file_name: File name (required) :type file_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UploadFileLayersResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_upload_file_layers_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_upload_file_layers_handler_with_http_info(upload_id, file_name, **kwargs) # noqa: E501 - - @validate_arguments - def list_upload_file_layers_handler_with_http_info(self, upload_id : Annotated[StrictStr, Field(..., description="Upload id")], file_name : Annotated[StrictStr, Field(..., description="File name")], **kwargs) -> ApiResponse: # noqa: E501 - """List the layers of on uploaded file. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_upload_file_layers_handler_with_http_info(upload_id, file_name, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._list_upload_file_layers_handler_serialize( + upload_id=upload_id, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_upload_file_layers_handler_with_http_info( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UploadFileLayersResponse]: + """List the layers of on uploaded file. + :param upload_id: Upload id (required) :type upload_id: str :param file_name: File name (required) :type file_name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(UploadFileLayersResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._list_upload_file_layers_handler_serialize( + upload_id=upload_id, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'upload_id', - 'file_name' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def list_upload_file_layers_handler_without_preload_content( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + file_name: Annotated[StrictStr, Field(description="File name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List the layers of on uploaded file. + + + :param upload_id: Upload id (required) + :type upload_id: str + :param file_name: File name (required) + :type file_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_upload_file_layers_handler_serialize( + upload_id=upload_id, + file_name=file_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_upload_file_layers_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFileLayersResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['upload_id']: - _path_params['upload_id'] = _params['upload_id'] + def _list_upload_file_layers_handler_serialize( + self, + upload_id, + file_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None - if _params['file_name']: - _path_params['file_name'] = _params['file_name'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if upload_id is not None: + _path_params['upload_id'] = upload_id + if file_name is not None: + _path_params['file_name'] = file_name # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "UploadFileLayersResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/uploads/{upload_id}/files/{file_name}/layers', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/uploads/{upload_id}/files/{file_name}/layers', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - 
@validate_arguments - def list_upload_files_handler(self, upload_id : Annotated[StrictStr, Field(..., description="Upload id")], **kwargs) -> UploadFilesResponse: # noqa: E501 - """List the files of on upload. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_upload_files_handler(upload_id, async_req=True) - >>> result = thread.get() + + @validate_call + def list_upload_files_handler( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UploadFilesResponse: + """List the files of on upload. + :param upload_id: Upload id (required) :type upload_id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: UploadFilesResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the list_upload_files_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.list_upload_files_handler_with_http_info(upload_id, **kwargs) # noqa: E501 - - @validate_arguments - def list_upload_files_handler_with_http_info(self, upload_id : Annotated[StrictStr, Field(..., description="Upload id")], **kwargs) -> ApiResponse: # noqa: E501 - """List the files of on upload. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_upload_files_handler_with_http_info(upload_id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._list_upload_files_handler_serialize( + upload_id=upload_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFilesResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_upload_files_handler_with_http_info( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UploadFilesResponse]: + """List the files of on upload. + :param upload_id: Upload id (required) :type upload_id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(UploadFilesResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._list_upload_files_handler_serialize( + upload_id=upload_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFilesResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'upload_id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def list_upload_files_handler_without_preload_content( + self, + upload_id: Annotated[StrictStr, Field(description="Upload id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List the files of on upload. + + + :param upload_id: Upload id (required) + :type upload_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_upload_files_handler_serialize( + upload_id=upload_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method list_upload_files_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadFilesResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['upload_id']: - _path_params['upload_id'] = _params['upload_id'] + def _list_upload_files_handler_serialize( + self, + upload_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if upload_id is not None: + _path_params['upload_id'] = upload_id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "UploadFilesResponse", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/uploads/{upload_id}/files', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/uploads/{upload_id}/files', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def upload_handler(self, files : conlist(Union[StrictBytes, StrictStr]), **kwargs) -> AddCollection200Response: # noqa: E501 - """Uploads files. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.upload_handler(files, async_req=True) - >>> result = thread.get() + @validate_call + def upload_handler( + self, + files: List[Union[StrictBytes, StrictStr, Tuple[StrictStr, StrictBytes]]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Uploads files. + :param files: (required) :type files: List[bytearray] - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the upload_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.upload_handler_with_http_info(files, **kwargs) # noqa: E501 - - @validate_arguments - def upload_handler_with_http_info(self, files : conlist(Union[StrictBytes, StrictStr]), **kwargs) -> ApiResponse: # noqa: E501 - """Uploads files. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.upload_handler_with_http_info(files, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._upload_handler_serialize( + files=files, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def upload_handler_with_http_info( + self, + files: List[Union[StrictBytes, StrictStr, Tuple[StrictStr, StrictBytes]]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Uploads files. + :param files: (required) :type files: List[bytearray] - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._upload_handler_serialize( + files=files, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'files' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def upload_handler_without_preload_content( + self, + files: List[Union[StrictBytes, StrictStr, Tuple[StrictStr, StrictBytes]]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Uploads files. + + + :param files: (required) + :type files: List[bytearray] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._upload_handler_serialize( + files=files, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method upload_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _upload_handler_serialize( + self, + files, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + 'files[]': 'csv', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} - if _params['files']: - _files['files[]'] = _params['files'] - _collection_formats['files[]'] = 'csv' - + if files is not None: + _files['files[]'] = files # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['multipart/form-data'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "AddCollection200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/upload', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/upload', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git 
a/python/geoengine_openapi_client/api/user_api.py b/python/geoengine_openapi_client/api/user_api.py index 0e38d07c..7b87402d 100644 --- a/python/geoengine_openapi_client/api/user_api.py +++ b/python/geoengine_openapi_client/api/user_api.py @@ -12,35 +12,28 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr, conint +from pydantic import Field, StrictStr from typing import List, Optional - -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response +from typing_extensions import Annotated from geoengine_openapi_client.models.add_role import AddRole from geoengine_openapi_client.models.computation_quota import ComputationQuota from geoengine_openapi_client.models.data_usage import DataUsage from geoengine_openapi_client.models.data_usage_summary import DataUsageSummary +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.operator_quota import OperatorQuota from geoengine_openapi_client.models.quota import Quota from geoengine_openapi_client.models.role_description import RoleDescription from geoengine_openapi_client.models.update_quota import UpdateQuota from geoengine_openapi_client.models.usage_summary_granularity import UsageSummaryGranularity -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class UserApi: @@ -55,1834 +48,3460 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def add_role_handler(self, add_role : AddRole, **kwargs) -> str: # noqa: E501 - """Add a new role. Requires admin privilige. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def add_role_handler( + self, + add_role: AddRole, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Add a new role. Requires admin privilige. - >>> thread = api.add_role_handler(add_role, async_req=True) - >>> result = thread.get() :param add_role: (required) :type add_role: AddRole - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: str - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the add_role_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.add_role_handler_with_http_info(add_role, **kwargs) # noqa: E501 - - @validate_arguments - def add_role_handler_with_http_info(self, add_role : AddRole, **kwargs) -> ApiResponse: # noqa: E501 - """Add a new role. Requires admin privilige. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.add_role_handler_with_http_info(add_role, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._add_role_handler_serialize( + add_role=add_role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_role_handler_with_http_info( + self, + add_role: AddRole, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Add a new role. Requires admin privilige. + :param add_role: (required) :type add_role: AddRole - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._add_role_handler_serialize( + add_role=add_role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'add_role' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def add_role_handler_without_preload_content( + self, + add_role: AddRole, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a new role. Requires admin privilige. + + + :param add_role: (required) + :type add_role: AddRole + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_role_handler_serialize( + add_role=add_role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method add_role_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _add_role_handler_serialize( + self, + add_role, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['add_role'] is not None: - _body_params = _params['add_role'] + if add_role is not None: + _body_params = add_role + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "str", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/roles', 'PUT', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='PUT', + resource_path='/roles', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def assign_role_handler(self, user : Annotated[StrictStr, Field(..., description="User id")], role : 
Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> None: # noqa: E501 - """Assign a role to a user. Requires admin privilige. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.assign_role_handler(user, role, async_req=True) - >>> result = thread.get() + @validate_call + def assign_role_handler( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Assign a role to a user. Requires admin privilige. + :param user: User id (required) :type user: str :param role: Role id (required) :type role: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the assign_role_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.assign_role_handler_with_http_info(user, role, **kwargs) # noqa: E501 - - @validate_arguments - def assign_role_handler_with_http_info(self, user : Annotated[StrictStr, Field(..., description="User id")], role : Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> ApiResponse: # noqa: E501 - """Assign a role to a user. Requires admin privilige. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.assign_role_handler_with_http_info(user, role, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._assign_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def assign_role_handler_with_http_info( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Assign a role to a user. Requires admin privilige. + :param user: User id (required) :type user: str :param role: Role id (required) :type role: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._assign_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'user', - 'role' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def assign_role_handler_without_preload_content( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Assign a role to a user. Requires admin privilige. + + + :param user: User id (required) + :type user: str + :param role: Role id (required) + :type role: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._assign_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method assign_role_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['user']: - _path_params['user'] = _params['user'] + def _assign_role_handler_serialize( + self, + user, + role, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None - if _params['role']: - _path_params['role'] = _params['role'] + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if user is not None: + _path_params['user'] = user + if role is not None: + _path_params['role'] = role # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/users/{user}/roles/{role}', 'POST', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/users/{user}/roles/{role}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def computation_quota_handler(self, computation : Annotated[StrictStr, Field(..., description="Computation id")], **kwargs) -> List[OperatorQuota]: # noqa: E501 - """Retrieves the quota used by computation with the given computation id # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.computation_quota_handler(computation, async_req=True) - >>> result = thread.get() + + @validate_call + def computation_quota_handler( + self, + computation: Annotated[StrictStr, Field(description="Computation id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[OperatorQuota]: + """Retrieves the quota used by computation with the given computation id + :param computation: Computation id (required) :type computation: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[OperatorQuota] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the computation_quota_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.computation_quota_handler_with_http_info(computation, **kwargs) # noqa: E501 - - @validate_arguments - def computation_quota_handler_with_http_info(self, computation : Annotated[StrictStr, Field(..., description="Computation id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the quota used by computation with the given computation id # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.computation_quota_handler_with_http_info(computation, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._computation_quota_handler_serialize( + computation=computation, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[OperatorQuota]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def computation_quota_handler_with_http_info( + self, + computation: Annotated[StrictStr, Field(description="Computation id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[OperatorQuota]]: + """Retrieves the quota used by computation with the given computation id + :param computation: Computation id (required) :type computation: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[OperatorQuota], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._computation_quota_handler_serialize( + computation=computation, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[OperatorQuota]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'computation' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def computation_quota_handler_without_preload_content( + self, + computation: Annotated[StrictStr, Field(description="Computation id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the quota used by computation with the given computation id + + + :param computation: Computation id (required) + :type computation: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._computation_quota_handler_serialize( + computation=computation, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method computation_quota_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[OperatorQuota]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['computation']: - _path_params['computation'] = _params['computation'] + def _computation_quota_handler_serialize( + self, + computation, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if computation is not None: + _path_params['computation'] = computation # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[OperatorQuota]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/quota/computations/{computation}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/quota/computations/{computation}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def computations_quota_handler(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> List[ComputationQuota]: # noqa: E501 - """Retrieves the quota used by computations # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def computations_quota_handler( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[ComputationQuota]: + """Retrieves the quota used by computations - >>> thread = api.computations_quota_handler(offset, limit, async_req=True) - >>> result = thread.get() :param offset: (required) :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[ComputationQuota] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the computations_quota_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.computations_quota_handler_with_http_info(offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def computations_quota_handler_with_http_info(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the quota used by computations # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.computations_quota_handler_with_http_info(offset, limit, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._computations_quota_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ComputationQuota]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def computations_quota_handler_with_http_info( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[ComputationQuota]]: + """Retrieves the quota used by computations + :param offset: (required) :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[ComputationQuota], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._computations_quota_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method computations_quota_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - - # process the query parameters - _query_params = [] - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) - - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) - - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { + _response_types_map: Dict[str, Optional[str]] = { '200': "List[ComputationQuota]", } - - return self.api_client.call_api( - '/quota/computations', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def data_usage_handler(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> List[DataUsage]: # noqa: E501 - """Retrieves the data usage # noqa: E501 + ) - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.data_usage_handler(offset, limit, async_req=True) - >>> result = thread.get() + @validate_call + def computations_quota_handler_without_preload_content( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the quota used by computations - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[DataUsage] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the data_usage_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.data_usage_handler_with_http_info(offset, limit, **kwargs) # noqa: E501 - - @validate_arguments - def data_usage_handler_with_http_info(self, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the data usage # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.data_usage_handler_with_http_info(offset, limit, async_req=True) - >>> result = thread.get() :param offset: (required) :type offset: int :param limit: (required) :type limit: int - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: tuple(List[DataUsage], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 + + _param = self._computations_quota_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'offset', - 'limit' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ComputationQuota]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method data_usage_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _computations_quota_handler_serialize( + self, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} + _host = None - # process the query parameters - _query_params = [] - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) + _collection_formats: Dict[str, str] = { + } - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + # process the query parameters + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[DataUsage]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/quota/dataUsage', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/quota/computations', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def data_usage_summary_handler(self, granularity : UsageSummaryGranularity, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), dataset : Optional[StrictStr] = None, **kwargs) -> List[DataUsageSummary]: # noqa: E501 - """Retrieves the data usage summary # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.data_usage_summary_handler(granularity, offset, limit, dataset, async_req=True) - >>> result = thread.get() - :param granularity: (required) - :type granularity: UsageSummaryGranularity - :param offset: (required) - :type offset: int - :param limit: (required) - :type limit: int - :param dataset: - :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[DataUsageSummary] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the data_usage_summary_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.data_usage_summary_handler_with_http_info(granularity, offset, limit, dataset, **kwargs) # noqa: E501 - - @validate_arguments - def data_usage_summary_handler_with_http_info(self, granularity : UsageSummaryGranularity, offset : conint(strict=True, ge=0), limit : conint(strict=True, ge=0), dataset : Optional[StrictStr] = None, **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the data usage summary # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.data_usage_summary_handler_with_http_info(granularity, offset, limit, dataset, async_req=True) - >>> result = thread.get() + @validate_call + def data_usage_handler( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[DataUsage]: + """Retrieves the data usage + - :param granularity: (required) - :type granularity: UsageSummaryGranularity :param offset: (required) :type offset: int :param limit: (required) :type limit: int - :param dataset: - :type dataset: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[DataUsageSummary], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - 'granularity', - 'offset', - 'limit', - 'dataset' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._data_usage_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method data_usage_summary_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - - # process the query parameters - _query_params = [] - if _params.get('granularity') is not None: # noqa: E501 - _query_params.append(('granularity', _params['granularity'].value)) + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DataUsage]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def data_usage_handler_with_http_info( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[DataUsage]]: + """Retrieves the data usage + + + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._data_usage_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DataUsage]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def data_usage_handler_without_preload_content( + self, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the data usage - if _params.get('offset') is not None: # noqa: E501 - _query_params.append(('offset', _params['offset'])) - if _params.get('limit') is not None: # noqa: E501 - _query_params.append(('limit', _params['limit'])) + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._data_usage_handler_serialize( + offset=offset, + limit=limit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DataUsage]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + - if _params.get('dataset') is not None: # noqa: E501 - _query_params.append(('dataset', _params['dataset'])) + def _data_usage_handler_serialize( + self, + offset, + limit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/quota/dataUsage', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def data_usage_summary_handler( + self, + granularity: UsageSummaryGranularity, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + dataset: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[DataUsageSummary]: + """Retrieves the data usage summary + - _response_types_map = { + :param granularity: (required) + :type granularity: UsageSummaryGranularity + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param dataset: + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._data_usage_summary_handler_serialize( + granularity=granularity, + offset=offset, + limit=limit, + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { '200': "List[DataUsageSummary]", } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def data_usage_summary_handler_with_http_info( + self, + granularity: UsageSummaryGranularity, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + dataset: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[DataUsageSummary]]: + """Retrieves the data usage summary - return self.api_client.call_api( - '/quota/dataUsage/summary', 'GET', - _path_params, - _query_params, - _header_params, + + :param granularity: (required) + :type granularity: UsageSummaryGranularity + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param dataset: + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._data_usage_summary_handler_serialize( + granularity=granularity, + offset=offset, + limit=limit, + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DataUsageSummary]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def data_usage_summary_handler_without_preload_content( + self, + granularity: UsageSummaryGranularity, + offset: Annotated[int, Field(strict=True, ge=0)], + limit: Annotated[int, Field(strict=True, ge=0)], + dataset: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the data usage summary + + + :param granularity: (required) + :type granularity: UsageSummaryGranularity + :param offset: (required) + :type offset: int + :param limit: (required) + :type limit: int + :param dataset: + :type dataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._data_usage_summary_handler_serialize( + granularity=granularity, + offset=offset, + limit=limit, + dataset=dataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[DataUsageSummary]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _data_usage_summary_handler_serialize( + self, + granularity, + offset, + limit, + dataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if granularity is not None: + + _query_params.append(('granularity', granularity.value)) + + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if dataset is not None: + + _query_params.append(('dataset', dataset)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/quota/dataUsage/summary', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def get_role_by_name_handler(self, name : Annotated[StrictStr, Field(..., description="Role Name")], **kwargs) -> AddCollection200Response: # noqa: E501 - """Get role by name # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_role_by_name_handler(name, async_req=True) - >>> result = thread.get() + + @validate_call + def get_role_by_name_handler( + self, + name: Annotated[StrictStr, Field(description="Role Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Get role by name + :param name: Role Name (required) :type name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_role_by_name_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_role_by_name_handler_with_http_info(name, **kwargs) # noqa: E501 - - @validate_arguments - def get_role_by_name_handler_with_http_info(self, name : Annotated[StrictStr, Field(..., description="Role Name")], **kwargs) -> ApiResponse: # noqa: E501 - """Get role by name # noqa: E501 - - This method makes a synchronous HTTP request by default. 
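Continuing the hedged sketch above (same `user_api` instance): the reworked `get_role_by_name_handler` now returns the generated `InlineObject2` model instead of `AddCollection200Response`; the role name below is a placeholder.

# Resolve a role by name; "example-role" is a placeholder, not a real role.
role = user_api.get_role_by_name_handler(name="example-role")
print(role)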
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_role_by_name_handler_with_http_info(name, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_role_by_name_handler_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_role_by_name_handler_with_http_info( + self, + name: Annotated[StrictStr, Field(description="Role Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Get role by name + :param name: Role Name (required) :type name: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_role_by_name_handler_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_role_by_name_handler_without_preload_content( + self, + name: Annotated[StrictStr, Field(description="Role Name")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get role by name + + + :param name: Role Name (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_role_by_name_handler_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_role_by_name_handler_serialize( + self, + name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/roles/byName/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_role_descriptions( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[RoleDescription]: + """Query roles for the current user. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_role_descriptions_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[RoleDescription]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_role_descriptions_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[RoleDescription]]: + """Query roles for the current user. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_role_descriptions_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[RoleDescription]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_role_descriptions_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Query roles for the current user. + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_role_descriptions_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[RoleDescription]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_role_descriptions_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/user/roles/descriptions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_user_quota_handler( + self, + user: Annotated[StrictStr, Field(description="User id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Quota: + """Retrieves the available and used quota of a specific user. + + + :param user: User id (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
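Under the same sketch assumptions, listing the current user's roles needs no parameters:

# Roles attached to the user that owns the current session.
for description in user_api.get_role_descriptions():
    print(description)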
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_quota_handler_serialize( + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_user_quota_handler_with_http_info( + self, + user: Annotated[StrictStr, Field(description="User id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Quota]: + """Retrieves the available and used quota of a specific user. + + + :param user: User id (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_user_quota_handler_serialize( + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_user_quota_handler_without_preload_content( + self, + user: Annotated[StrictStr, Field(description="User id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the available and used quota of a specific user. + + + :param user: User id (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 + + _param = self._get_user_quota_handler_serialize( + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'name' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_role_by_name_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _get_user_quota_handler_serialize( + self, + user, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['name']: - _path_params['name'] = _params['name'] + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if user is not None: + _path_params['user'] = user # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "AddCollection200Response", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/roles/byName/{name}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/quotas/{user}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def get_role_descriptions(self, **kwargs) -> List[RoleDescription]: # noqa: E501 - """Query roles for the current user. # noqa: E501 - This method makes a synchronous HTTP request by default. 
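For the admin-only per-user quota lookup, the path parameter is a user id; the UUID below is a placeholder (sketch assumptions as above).

# Quota of a specific user, identified by a placeholder UUID.
quota = user_api.get_user_quota_handler(user="00000000-0000-0000-0000-000000000000")
print(quota)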
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_role_descriptions(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[RoleDescription] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_role_descriptions_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_role_descriptions_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def get_role_descriptions_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Query roles for the current user. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_role_descriptions_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + @validate_call + def quota_handler( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Quota: + """Retrieves the available and used quota of the current user. + + :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[RoleDescription], status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() + """ # noqa: E501 - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _param = self._quota_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_role_descriptions" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def quota_handler_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Quota]: + """Retrieves the available and used quota of the current user. - # process the path parameters - _path_params = {} - # process the query parameters - _query_params = [] - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + _param = self._quota_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _response_types_map = { - '200': "List[RoleDescription]", + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", } - - return self.api_client.call_api( - '/user/roles/descriptions', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def get_user_quota_handler(self, user : Annotated[StrictStr, Field(..., description="User id")], **kwargs) -> Quota: # noqa: E501 - """Retrieves the available and used quota of a specific user. # noqa: E501 + ) - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_user_quota_handler(user, async_req=True) - >>> result = thread.get() + @validate_call + def quota_handler_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves the available and used quota of the current user. - :param user: User id (required) - :type user: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Quota - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_user_quota_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_user_quota_handler_with_http_info(user, **kwargs) # noqa: E501 - - @validate_arguments - def get_user_quota_handler_with_http_info(self, user : Annotated[StrictStr, Field(..., description="User id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the available and used quota of a specific user. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_user_quota_handler_with_http_info(user, async_req=True) - >>> result = thread.get() - :param user: User id (required) - :type user: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(Quota, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 - _params = locals() + _param = self._quota_handler_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'user' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Quota", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_user_quota_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _quota_handler_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['user']: - _path_params['user'] = _params['user'] + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "Quota", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/quotas/{user}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/quota', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def quota_handler(self, **kwargs) -> Quota: # noqa: E501 - """Retrieves the available and used quota of the current user. # noqa: E501 - This method makes a synchronous HTTP request by default. 
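The session-scoped quota call takes no arguments; under the same sketch assumptions, the ApiResponse variant also exposes the HTTP status code alongside the deserialized data.

# Quota of the user behind the current session token.
print(user_api.quota_handler())

# ApiResponse variant: status code, headers, and data are available separately.
quota_info = user_api.quota_handler_with_http_info()
print(quota_info.status_code, quota_info.data)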
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.quota_handler(async_req=True) - >>> result = thread.get() - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Quota - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the quota_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.quota_handler_with_http_info(**kwargs) # noqa: E501 - - @validate_arguments - def quota_handler_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves the available and used quota of the current user. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.quota_handler_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional + @validate_call + def remove_role_handler( + self, + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove a role. Requires admin privilige. + + + :param role: Role id (required) + :type role: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. 
- If the method is called asynchronously, - returns the request thread. - :rtype: tuple(Quota, status_code(int), headers(HTTPHeaderDict)) - """ - - _params = locals() - - _all_params = [ - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._remove_role_handler_serialize( + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method quota_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_role_handler_with_http_info( + self, + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove a role. Requires admin privilige. - # process the query parameters - _query_params = [] - # process the header parameters - _header_params = dict(_params.get('_headers', {})) - # process the form parameters - _form_params = [] - _files = {} - # process the body parameter - _body_params = None - # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 + :param role: Role id (required) + :type role: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_role_handler_serialize( + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _response_types_map = { - '200': "Quota", + _response_types_map: Dict[str, Optional[str]] = { + '200': None, } - - return self.api_client.call_api( - '/quota', 'GET', - _path_params, - _query_params, - _header_params, - body=_body_params, - post_params=_form_params, - files=_files, + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, response_types_map=_response_types_map, - auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), - collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) - - @validate_arguments - def remove_role_handler(self, role : Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> None: # noqa: E501 - """Remove a role. Requires admin privilige. # noqa: E501 + ) - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_role_handler(role, async_req=True) - >>> result = thread.get() + @validate_call + def remove_role_handler_without_preload_content( + self, + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove a role. Requires admin privilige. - :param role: Role id (required) - :type role: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the remove_role_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.remove_role_handler_with_http_info(role, **kwargs) # noqa: E501 - - @validate_arguments - def remove_role_handler_with_http_info(self, role : Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> ApiResponse: # noqa: E501 - """Remove a role. Requires admin privilige. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.remove_role_handler_with_http_info(role, async_req=True) - >>> result = thread.get() :param role: Role id (required) :type role: str - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - - _params = locals() + """ # noqa: E501 + + _param = self._remove_role_handler_serialize( + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _all_params = [ - 'role' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout ) + return response_data.response - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_role_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] - _collection_formats = {} + def _remove_role_handler_serialize( + self, + role, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - # process the path parameters - _path_params = {} - if _params['role']: - _path_params['role'] = _params['role'] + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if role is not None: + _path_params['role'] = role # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: 
E501 - _response_types_map = {} - return self.api_client.call_api( - '/roles/{role}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/roles/{role}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) - @validate_arguments - def revoke_role_handler(self, user : Annotated[StrictStr, Field(..., description="User id")], role : Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> None: # noqa: E501 - """Revoke a role from a user. Requires admin privilige. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.revoke_role_handler(user, role, async_req=True) - >>> result = thread.get() + + @validate_call + def revoke_role_handler( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Revoke a role from a user. Requires admin privilige. + :param user: User id (required) :type user: str :param role: Role id (required) :type role: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
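A hedged sketch of calling the regenerated `remove_role_handler` (DELETE /roles/{role}) follows, assuming the same authenticated `api_client` and `UserApi` class as in the quota sketch above; the role id is a placeholder.

    from geoengine_openapi_client.api.user_api import UserApi
    from geoengine_openapi_client.exceptions import ApiException

    user_api = UserApi(api_client)                    # session must carry admin privileges (assumed)
    role_id = "11111111-1111-1111-1111-111111111111"  # placeholder role UUID

    try:
        user_api.remove_role_handler(role_id)         # returns None; a 200 response means the role is gone
    except ApiException as err:
        print(f"removing role failed: {err.status} {err.reason}")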
- :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the revoke_role_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.revoke_role_handler_with_http_info(user, role, **kwargs) # noqa: E501 - - @validate_arguments - def revoke_role_handler_with_http_info(self, user : Annotated[StrictStr, Field(..., description="User id")], role : Annotated[StrictStr, Field(..., description="Role id")], **kwargs) -> ApiResponse: # noqa: E501 - """Revoke a role from a user. Requires admin privilige. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.revoke_role_handler_with_http_info(user, role, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._revoke_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def revoke_role_handler_with_http_info( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Revoke a role from a user. Requires admin privilige. + :param user: User id (required) :type user: str :param role: Role id (required) :type role: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ + """ # noqa: E501 + + _param = self._revoke_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'user', - 'role' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def revoke_role_handler_without_preload_content( + self, + user: Annotated[StrictStr, Field(description="User id")], + role: Annotated[StrictStr, Field(description="Role id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Revoke a role from a user. Requires admin privilige. + + + :param user: User id (required) + :type user: str + :param role: Role id (required) + :type role: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._revoke_role_handler_serialize( + user=user, + role=role, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method revoke_role_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['user']: - _path_params['user'] = _params['user'] + def _revoke_role_handler_serialize( + self, + user, + role, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: - if _params['role']: - _path_params['role'] = _params['role'] + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if user is not None: + _path_params['user'] = user + if role is not None: + _path_params['role'] = role # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/users/{user}/roles/{role}', 'DELETE', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/users/{user}/roles/{role}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def update_user_quota_handler(self, user : Annotated[StrictStr, Field(..., description="User id")], update_quota : UpdateQuota, **kwargs) -> None: # noqa: E501 - """Update the available quota of a specific user. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_user_quota_handler(user, update_quota, async_req=True) - >>> result = thread.get() + @validate_call + def update_user_quota_handler( + self, + user: Annotated[StrictStr, Field(description="User id")], + update_quota: UpdateQuota, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update the available quota of a specific user. + :param user: User id (required) :type user: str :param update_quota: (required) :type update_quota: UpdateQuota - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the update_user_quota_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.update_user_quota_handler_with_http_info(user, update_quota, **kwargs) # noqa: E501 - - @validate_arguments - def update_user_quota_handler_with_http_info(self, user : Annotated[StrictStr, Field(..., description="User id")], update_quota : UpdateQuota, **kwargs) -> ApiResponse: # noqa: E501 - """Update the available quota of a specific user. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.update_user_quota_handler_with_http_info(user, update_quota, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._update_user_quota_handler_serialize( + user=user, + update_quota=update_quota, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_user_quota_handler_with_http_info( + self, + user: Annotated[StrictStr, Field(description="User id")], + update_quota: UpdateQuota, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update the available quota of a specific user. + :param user: User id (required) :type user: str :param update_quota: (required) :type update_quota: UpdateQuota - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: None - """ + """ # noqa: E501 + + _param = self._update_user_quota_handler_serialize( + user=user, + update_quota=update_quota, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'user', - 'update_quota' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def update_user_quota_handler_without_preload_content( + self, + user: Annotated[StrictStr, Field(description="User id")], + update_quota: UpdateQuota, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update the available quota of a specific user. + + + :param user: User id (required) + :type user: str + :param update_quota: (required) + :type update_quota: UpdateQuota + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
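For `update_user_quota_handler` (POST /quotas/{user}), a sketch follows. The `UpdateQuota` request model is not part of this hunk, so the `available` field used below is an assumption; check the model definition before relying on it.

    from geoengine_openapi_client.api.user_api import UserApi
    from geoengine_openapi_client.models.update_quota import UpdateQuota

    user_api = UserApi(api_client)  # admin session assumed

    new_quota = UpdateQuota(available=10_000)  # field name assumed, not shown in this diff
    user_api.update_user_quota_handler(
        "22222222-2222-2222-2222-222222222222",  # placeholder user UUID
        new_quota,
    )

The serialize helper sets the `Content-Type` to `application/json` automatically unless `_content_type` is passed to override it.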
+ """ # noqa: E501 + + _param = self._update_user_quota_handler_serialize( + user=user, + update_quota=update_quota, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method update_user_quota_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['user']: - _path_params['user'] = _params['user'] + def _update_user_quota_handler_serialize( + self, + user, + update_quota, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if user is not None: + _path_params['user'] = user # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['update_quota'] is not None: - _body_params = _params['update_quota'] + if update_quota is not None: + _body_params = update_quota + + # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = {} + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/quotas/{user}', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/quotas/{user}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api/workflows_api.py b/python/geoengine_openapi_client/api/workflows_api.py index c59d6eaf..7ae7b371 100644 --- 
a/python/geoengine_openapi_client/api/workflows_api.py +++ b/python/geoengine_openapi_client/api/workflows_api.py @@ -12,19 +12,15 @@ Do not edit the class manually. """ # noqa: E501 - -import re # noqa: F401 -import io import warnings - -from pydantic import validate_arguments, ValidationError - +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union from typing_extensions import Annotated -from pydantic import Field, StrictStr -from typing import List, Union - -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response +from pydantic import Field, StrictStr +from typing import List +from typing_extensions import Annotated +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.provenance_entry import ProvenanceEntry from geoengine_openapi_client.models.raster_dataset_from_workflow import RasterDatasetFromWorkflow from geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType @@ -34,12 +30,9 @@ from geoengine_openapi_client.models.typed_result_descriptor import TypedResultDescriptor from geoengine_openapi_client.models.workflow import Workflow -from geoengine_openapi_client.api_client import ApiClient +from geoengine_openapi_client.api_client import ApiClient, RequestSerialized from geoengine_openapi_client.api_response import ApiResponse -from geoengine_openapi_client.exceptions import ( # noqa: F401 - ApiTypeError, - ApiValueError -) +from geoengine_openapi_client.rest import RESTResponseType class WorkflowsApi: @@ -54,722 +47,1349 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient.get_default() self.api_client = api_client - @validate_arguments - def dataset_from_workflow_handler(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], raster_dataset_from_workflow : RasterDatasetFromWorkflow, **kwargs) -> TaskResponse: # noqa: E501 - """Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. # noqa: E501 - Returns the id of the created task # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def dataset_from_workflow_handler( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + raster_dataset_from_workflow: RasterDatasetFromWorkflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TaskResponse: + """Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. Returns the id of the created task - >>> thread = api.dataset_from_workflow_handler(id, raster_dataset_from_workflow, async_req=True) - >>> result = thread.get() :param id: Workflow id (required) :type id: str :param raster_dataset_from_workflow: (required) :type raster_dataset_from_workflow: RasterDatasetFromWorkflow - :param async_req: Whether to execute the request asynchronously. 
- :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: TaskResponse - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the dataset_from_workflow_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.dataset_from_workflow_handler_with_http_info(id, raster_dataset_from_workflow, **kwargs) # noqa: E501 - - @validate_arguments - def dataset_from_workflow_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], raster_dataset_from_workflow : RasterDatasetFromWorkflow, **kwargs) -> ApiResponse: # noqa: E501 - """Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. # noqa: E501 - - Returns the id of the created task # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.dataset_from_workflow_handler_with_http_info(id, raster_dataset_from_workflow, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._dataset_from_workflow_handler_serialize( + id=id, + raster_dataset_from_workflow=raster_dataset_from_workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def dataset_from_workflow_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + raster_dataset_from_workflow: RasterDatasetFromWorkflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[TaskResponse]: + """Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. Returns the id of the created task + :param id: Workflow id (required) :type id: str :param raster_dataset_from_workflow: (required) :type raster_dataset_from_workflow: RasterDatasetFromWorkflow - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(TaskResponse, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._dataset_from_workflow_handler_serialize( + id=id, + raster_dataset_from_workflow=raster_dataset_from_workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'id', - 'raster_dataset_from_workflow' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def dataset_from_workflow_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + raster_dataset_from_workflow: RasterDatasetFromWorkflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. Returns the id of the created task + + + :param id: Workflow id (required) + :type id: str + :param raster_dataset_from_workflow: (required) + :type raster_dataset_from_workflow: RasterDatasetFromWorkflow + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._dataset_from_workflow_handler_serialize( + id=id, + raster_dataset_from_workflow=raster_dataset_from_workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method dataset_from_workflow_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "TaskResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _dataset_from_workflow_handler_serialize( + self, + id, + raster_dataset_from_workflow, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['raster_dataset_from_workflow'] is not None: - _body_params = _params['raster_dataset_from_workflow'] + if raster_dataset_from_workflow is not None: + _body_params = raster_dataset_from_workflow + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "TaskResponse", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/datasetFromWorkflow/{id}', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/datasetFromWorkflow/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - 
_preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def get_workflow_all_metadata_zip_handler(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> bytearray: # noqa: E501 - """Gets a ZIP archive of the worklow, its provenance and the output metadata. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_all_metadata_zip_handler(id, async_req=True) - >>> result = thread.get() + @validate_call + def get_workflow_all_metadata_zip_handler( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[int]: + """Gets a ZIP archive of the worklow, its provenance and the output metadata. + :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: bytearray - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_workflow_all_metadata_zip_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_workflow_all_metadata_zip_handler_with_http_info(id, **kwargs) # noqa: E501 - - @validate_arguments - def get_workflow_all_metadata_zip_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> ApiResponse: # noqa: E501 - """Gets a ZIP archive of the worklow, its provenance and the output metadata. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
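To illustrate the regenerated `dataset_from_workflow_handler` (POST /datasetFromWorkflow/{id}), a small wrapper sketch follows. The fields of `RasterDatasetFromWorkflow` are defined in its model module and are not repeated here; the function assumes a fully constructed instance is passed in.

    from geoengine_openapi_client.api.workflows_api import WorkflowsApi
    from geoengine_openapi_client.models.raster_dataset_from_workflow import RasterDatasetFromWorkflow
    from geoengine_openapi_client.models.task_response import TaskResponse


    def create_dataset_from_workflow(api_client, workflow_id: str,
                                     request: RasterDatasetFromWorkflow) -> TaskResponse:
        """Submit a dataset-creation task for the given workflow and return its TaskResponse."""
        workflows_api = WorkflowsApi(api_client)
        return workflows_api.dataset_from_workflow_handler(workflow_id, request)

As stated in the handler docstring, the returned TaskResponse carries the id of the created task.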
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_workflow_all_metadata_zip_handler_with_http_info(id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_workflow_all_metadata_zip_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_all_metadata_zip_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[int]]: + """Gets a ZIP archive of the worklow, its provenance and the output metadata. + :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(bytearray, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._get_workflow_all_metadata_zip_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def get_workflow_all_metadata_zip_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Gets a ZIP archive of the worklow, its provenance and the output metadata. + + + :param id: Workflow id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_all_metadata_zip_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_workflow_all_metadata_zip_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[int]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _get_workflow_all_metadata_zip_handler_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/zip']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/zip' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "bytearray", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/workflow/{id}/allMetadata/zip', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/workflow/{id}/allMetadata/zip', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def get_workflow_metadata_handler(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> TypedResultDescriptor: # noqa: E501 - """Gets the metadata of a workflow # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def get_workflow_metadata_handler( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TypedResultDescriptor: + """Gets the metadata of a workflow - >>> thread = api.get_workflow_metadata_handler(id, async_req=True) - >>> result = thread.get() :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: TypedResultDescriptor - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_workflow_metadata_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_workflow_metadata_handler_with_http_info(id, **kwargs) # noqa: E501 - - @validate_arguments - def get_workflow_metadata_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> ApiResponse: # noqa: E501 - """Gets the metadata of a workflow # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_workflow_metadata_handler_with_http_info(id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_workflow_metadata_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TypedResultDescriptor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_metadata_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[TypedResultDescriptor]: + """Gets the metadata of a workflow + :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(TypedResultDescriptor, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._get_workflow_metadata_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "TypedResultDescriptor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def get_workflow_metadata_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Gets the metadata of a workflow + + + :param id: Workflow id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_metadata_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_workflow_metadata_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "TypedResultDescriptor", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _get_workflow_metadata_handler_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "TypedResultDescriptor", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/workflow/{id}/metadata', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/workflow/{id}/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def get_workflow_provenance_handler(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> List[ProvenanceEntry]: # noqa: E501 - """Gets the provenance of all datasets used in a workflow. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def get_workflow_provenance_handler( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[ProvenanceEntry]: + """Gets the provenance of all datasets used in a workflow. - >>> thread = api.get_workflow_provenance_handler(id, async_req=True) - >>> result = thread.get() :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: List[ProvenanceEntry] - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the get_workflow_provenance_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.get_workflow_provenance_handler_with_http_info(id, **kwargs) # noqa: E501 - - @validate_arguments - def get_workflow_provenance_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> ApiResponse: # noqa: E501 - """Gets the provenance of all datasets used in a workflow. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.get_workflow_provenance_handler_with_http_info(id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._get_workflow_provenance_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProvenanceEntry]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_provenance_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[ProvenanceEntry]]: + """Gets the provenance of all datasets used in a workflow. + :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(List[ProvenanceEntry], status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._get_workflow_provenance_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProvenanceEntry]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def get_workflow_provenance_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Gets the provenance of all datasets used in a workflow. + + + :param id: Workflow id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_provenance_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method get_workflow_provenance_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[ProvenanceEntry]", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _get_workflow_provenance_handler_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "List[ProvenanceEntry]", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/workflow/{id}/provenance', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/workflow/{id}/provenance', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def load_workflow_handler(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> Workflow: # noqa: E501 - """Retrieves an existing Workflow. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def load_workflow_handler( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Workflow: + """Retrieves an existing Workflow. - >>> thread = api.load_workflow_handler(id, async_req=True) - >>> result = thread.get() :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: Workflow - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the load_workflow_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.load_workflow_handler_with_http_info(id, **kwargs) # noqa: E501 - - @validate_arguments - def load_workflow_handler_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], **kwargs) -> ApiResponse: # noqa: E501 - """Retrieves an existing Workflow. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.load_workflow_handler_with_http_info(id, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._load_workflow_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def load_workflow_handler_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Workflow]: + """Retrieves an existing Workflow. + :param id: Workflow id (required) :type id: str - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(Workflow, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._load_workflow_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _params = locals() - _all_params = [ - 'id' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + @validate_call + def load_workflow_handler_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieves an existing Workflow. + + + :param id: Workflow id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._load_workflow_handler_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method load_workflow_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "Workflow", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + def _load_workflow_handler_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + _host = None + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None + + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = { - '200': "Workflow", - } + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/workflow/{id}', 'GET', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='GET', + resource_path='/workflow/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + - @validate_arguments - def raster_stream_websocket(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], spatial_bounds : SpatialPartition2D, time_interval : StrictStr, spatial_resolution : SpatialResolution, attributes : StrictStr, result_type : RasterStreamWebsocketResultType, **kwargs) -> None: # noqa: E501 - """Query a workflow raster result as a stream of tiles via a websocket connection. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True + @validate_call + def raster_stream_websocket( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + spatial_bounds: SpatialPartition2D, + time_interval: StrictStr, + spatial_resolution: SpatialResolution, + attributes: StrictStr, + result_type: RasterStreamWebsocketResultType, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Query a workflow raster result as a stream of tiles via a websocket connection. - >>> thread = api.raster_stream_websocket(id, spatial_bounds, time_interval, spatial_resolution, attributes, result_type, async_req=True) - >>> result = thread.get() :param id: Workflow id (required) :type id: str @@ -783,32 +1403,79 @@ def raster_stream_websocket(self, id : Annotated[StrictStr, Field(..., descripti :type attributes: str :param result_type: (required) :type result_type: RasterStreamWebsocketResultType - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the raster_stream_websocket_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.raster_stream_websocket_with_http_info(id, spatial_bounds, time_interval, spatial_resolution, attributes, result_type, **kwargs) # noqa: E501 - - @validate_arguments - def raster_stream_websocket_with_http_info(self, id : Annotated[StrictStr, Field(..., description="Workflow id")], spatial_bounds : SpatialPartition2D, time_interval : StrictStr, spatial_resolution : SpatialResolution, attributes : StrictStr, result_type : RasterStreamWebsocketResultType, **kwargs) -> ApiResponse: # noqa: E501 - """Query a workflow raster result as a stream of tiles via a websocket connection. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.raster_stream_websocket_with_http_info(id, spatial_bounds, time_interval, spatial_resolution, attributes, result_type, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._raster_stream_websocket_serialize( + id=id, + spatial_bounds=spatial_bounds, + time_interval=time_interval, + spatial_resolution=spatial_resolution, + attributes=attributes, + result_type=result_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '101': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def raster_stream_websocket_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + spatial_bounds: SpatialPartition2D, + time_interval: StrictStr, + spatial_resolution: SpatialResolution, + attributes: StrictStr, + result_type: RasterStreamWebsocketResultType, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Query a workflow raster result as a stream of tiles via a websocket connection. + :param id: Workflow id (required) :type id: str @@ -822,258 +1489,485 @@ def raster_stream_websocket_with_http_info(self, id : Annotated[StrictStr, Field :type attributes: str :param result_type: (required) :type result_type: RasterStreamWebsocketResultType - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: None - """ - - _params = locals() - - _all_params = [ - 'id', - 'spatial_bounds', - 'time_interval', - 'spatial_resolution', - 'attributes', - 'result_type' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' - ] + """ # noqa: E501 + + _param = self._raster_stream_websocket_serialize( + id=id, + spatial_bounds=spatial_bounds, + time_interval=time_interval, + spatial_resolution=spatial_resolution, + attributes=attributes, + result_type=result_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method raster_stream_websocket" % _key - ) - _params[_key] = _val - del _params['kwargs'] - - _collection_formats = {} - - # process the path parameters - _path_params = {} - if _params['id']: - _path_params['id'] = _params['id'] + _response_types_map: Dict[str, Optional[str]] = { + '101': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - # process the query parameters - _query_params = [] - if _params.get('spatial_bounds') is not None: # noqa: E501 - _query_params.append(('spatialBounds', _params['spatial_bounds'])) + @validate_call + def raster_stream_websocket_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Workflow id")], + spatial_bounds: SpatialPartition2D, + time_interval: StrictStr, + spatial_resolution: SpatialResolution, + attributes: StrictStr, + result_type: RasterStreamWebsocketResultType, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Query a workflow raster result as a stream of tiles via a websocket connection. - if _params.get('time_interval') is not None: # noqa: E501 - _query_params.append(('timeInterval', _params['time_interval'])) - if _params.get('spatial_resolution') is not None: # noqa: E501 - _query_params.append(('spatialResolution', _params['spatial_resolution'])) + :param id: Workflow id (required) + :type id: str + :param spatial_bounds: (required) + :type spatial_bounds: SpatialPartition2D + :param time_interval: (required) + :type time_interval: str + :param spatial_resolution: (required) + :type spatial_resolution: SpatialResolution + :param attributes: (required) + :type attributes: str + :param result_type: (required) + :type result_type: RasterStreamWebsocketResultType + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._raster_stream_websocket_serialize( + id=id, + spatial_bounds=spatial_bounds, + time_interval=time_interval, + spatial_resolution=spatial_resolution, + attributes=attributes, + result_type=result_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - if _params.get('attributes') is not None: # noqa: E501 - _query_params.append(('attributes', _params['attributes'])) + _response_types_map: Dict[str, Optional[str]] = { + '101': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _raster_stream_websocket_serialize( + self, + id, + spatial_bounds, + time_interval, + spatial_resolution, + attributes, + result_type, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } - if _params.get('result_type') is not None: # noqa: E501 - _query_params.append(('resultType', _params['result_type'].value)) + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + if spatial_bounds is not None: + + _query_params.append(('spatialBounds', spatial_bounds)) + + if time_interval is not None: + + _query_params.append(('timeInterval', time_interval)) + + if spatial_resolution is not None: + + _query_params.append(('spatialResolution', spatial_resolution)) + + if attributes is not None: + + _query_params.append(('attributes', attributes)) + + if result_type is not None: + + _query_params.append(('resultType', result_type.value)) + # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - _response_types_map = {} - return self.api_client.call_api( - '/workflow/{id}/rasterStream', 'GET', - _path_params, - _query_params, - _header_params, + + + # authentication setting + _auth_settings: List[str] = [ + 'session_token' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/workflow/{id}/rasterStream', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - 
response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + - @validate_arguments - def register_workflow_handler(self, workflow : Workflow, **kwargs) -> AddCollection200Response: # noqa: E501 - """Registers a new Workflow. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.register_workflow_handler(workflow, async_req=True) - >>> result = thread.get() + @validate_call + def register_workflow_handler( + self, + workflow: Workflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> InlineObject2: + """Registers a new Workflow. + :param workflow: (required) :type workflow: Workflow - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _request_timeout: timeout setting for this request. - If one number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. - :rtype: AddCollection200Response - """ - kwargs['_return_http_data_only'] = True - if '_preload_content' in kwargs: - message = "Error! Please call the register_workflow_handler_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 - raise ValueError(message) - return self.register_workflow_handler_with_http_info(workflow, **kwargs) # noqa: E501 - - @validate_arguments - def register_workflow_handler_with_http_info(self, workflow : Workflow, **kwargs) -> ApiResponse: # noqa: E501 - """Registers a new Workflow. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.register_workflow_handler_with_http_info(workflow, async_req=True) - >>> result = thread.get() + """ # noqa: E501 + + _param = self._register_workflow_handler_serialize( + workflow=workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def register_workflow_handler_with_http_info( + self, + workflow: Workflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[InlineObject2]: + """Registers a new Workflow. + :param workflow: (required) :type workflow: Workflow - :param async_req: Whether to execute the request asynchronously. - :type async_req: bool, optional - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. - :type _preload_content: bool, optional - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :type _return_http_data_only: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. + request; this effectively ignores the + authentication in the spec for a single request. :type _request_auth: dict, optional - :type _content_type: string, optional: force content-type for the request + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional :return: Returns the result object. - If the method is called asynchronously, - returns the request thread. 
- :rtype: tuple(AddCollection200Response, status_code(int), headers(HTTPHeaderDict)) - """ + """ # noqa: E501 + + _param = self._register_workflow_handler_serialize( + workflow=workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) - _params = locals() + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) - _all_params = [ - 'workflow' - ] - _all_params.extend( - [ - 'async_req', - '_return_http_data_only', - '_preload_content', - '_request_timeout', - '_request_auth', - '_content_type', - '_headers' + + @validate_call + def register_workflow_handler_without_preload_content( + self, + workflow: Workflow, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Registers a new Workflow. + + + :param workflow: (required) + :type workflow: Workflow + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._register_workflow_handler_serialize( + workflow=workflow, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index ) - # validate the arguments - for _key, _val in _params['kwargs'].items(): - if _key not in _all_params: - raise ApiTypeError( - "Got an unexpected keyword argument '%s'" - " to method register_workflow_handler" % _key - ) - _params[_key] = _val - del _params['kwargs'] + _response_types_map: Dict[str, Optional[str]] = { + '200': "InlineObject2", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response - _collection_formats = {} - # process the path parameters - _path_params = {} + def _register_workflow_handler_serialize( + self, + workflow, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + # process the path parameters # process the query parameters - _query_params = [] # process the header parameters - _header_params = dict(_params.get('_headers', {})) # process the form parameters - _form_params = [] - _files = {} # process the body parameter - _body_params = None - if _params['workflow'] is not None: - _body_params = _params['workflow'] + if workflow is not None: + _body_params = workflow + # set the HTTP header `Accept` - _header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # set the HTTP header `Content-Type` - _content_types_list = _params.get('_content_type', - self.api_client.select_header_content_type( - ['application/json'])) - if _content_types_list: - _header_params['Content-Type'] = _content_types_list + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting - _auth_settings = ['session_token'] # noqa: E501 - - _response_types_map = { - '200': "AddCollection200Response", - } + _auth_settings: List[str] = [ + 'session_token' + ] - return self.api_client.call_api( - '/workflow', 'POST', - _path_params, - _query_params, - _header_params, + return self.api_client.param_serialize( + method='POST', + resource_path='/workflow', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, body=_body_params, post_params=_form_params, files=_files, - response_types_map=_response_types_map, auth_settings=_auth_settings, - async_req=_params.get('async_req'), - _return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 - _preload_content=_params.get('_preload_content', True), - _request_timeout=_params.get('_request_timeout'), collection_formats=_collection_formats, - _request_auth=_params.get('_request_auth')) + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/python/geoengine_openapi_client/api_client.py 
b/python/geoengine_openapi_client/api_client.py index 50c09422..03029367 100644 --- a/python/geoengine_openapi_client/api_client.py +++ b/python/geoengine_openapi_client/api_client.py @@ -12,24 +12,35 @@ """ # noqa: E501 -import atexit import datetime from dateutil.parser import parse +from enum import Enum +import decimal import json import mimetypes -from multiprocessing.pool import ThreadPool import os import re import tempfile from urllib.parse import quote +from typing import Tuple, Optional, List, Dict, Union +from pydantic import SecretStr from geoengine_openapi_client.configuration import Configuration -from geoengine_openapi_client.api_response import ApiResponse +from geoengine_openapi_client.api_response import ApiResponse, T as ApiResponseT import geoengine_openapi_client.models from geoengine_openapi_client import rest -from geoengine_openapi_client.exceptions import ApiValueError, ApiException - +from geoengine_openapi_client.exceptions import ( + ApiValueError, + ApiException, + BadRequestException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException +) + +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] class ApiClient: """Generic API client for OpenAPI client library builds. @@ -45,8 +56,6 @@ class ApiClient: the API. :param cookie: a cookie to include in the header when making calls to the API - :param pool_threads: The number of threads to use for async requests - to the API. More threads means more concurrent API requests. """ PRIMITIVE_TYPES = (float, bool, bytes, str, int) @@ -58,17 +67,22 @@ class ApiClient: 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, + 'decimal': decimal.Decimal, 'object': object, } _pool = None - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None, pool_threads=1) -> None: + def __init__( + self, + configuration=None, + header_name=None, + header_value=None, + cookie=None + ) -> None: # use default configuration if none is provided if configuration is None: configuration = Configuration.get_default() self.configuration = configuration - self.pool_threads = pool_threads self.rest_client = rest.RESTClientObject(configuration) self.default_headers = {} @@ -83,25 +97,7 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): - self.close() - - def close(self): - if self._pool: - self._pool.close() - self._pool.join() - self._pool = None - if hasattr(atexit, 'unregister'): - atexit.unregister(self.close) - - @property - def pool(self): - """Create thread pool on first request - avoids instantiating unused threadpool for blocking clients. 
- """ - if self._pool is None: - atexit.register(self.close) - self._pool = ThreadPool(self.pool_threads) - return self._pool + pass @property def user_agent(self): @@ -142,13 +138,42 @@ def set_default(cls, default): """ cls._default = default - def __call_api( - self, resource_path, method, path_params=None, - query_params=None, header_params=None, body=None, post_params=None, - files=None, response_types_map=None, auth_settings=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None, _host=None, - _request_auth=None): + def param_serialize( + self, + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None + ) -> RequestSerialized: + + """Builds the HTTP request params needed by the request. + :param method: Method to call. + :param resource_path: Path to method endpoint. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :return: tuple of form (path, http_method, query_params, header_params, + body, post_params, files) + """ config = self.configuration @@ -159,14 +184,17 @@ def __call_api( header_params['Cookie'] = self.cookie if header_params: header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) + header_params = dict( + self.parameters_to_tuples(header_params,collection_formats) + ) # path parameters if path_params: path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) + path_params = self.parameters_to_tuples( + path_params, + collection_formats + ) for k, v in path_params: # specified safe chars, encode everything resource_path = resource_path.replace( @@ -178,22 +206,30 @@ def __call_api( if post_params or files: post_params = post_params if post_params else [] post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) - post_params.extend(self.files_parameters(files)) + post_params = self.parameters_to_tuples( + post_params, + collection_formats + ) + if files: + post_params.extend(self.files_parameters(files)) # auth setting self.update_params_for_auth( - header_params, query_params, auth_settings, - resource_path, method, body, - request_auth=_request_auth) + header_params, + query_params, + auth_settings, + resource_path, + method, + body, + request_auth=_request_auth + ) # body if body: body = self.sanitize_for_serialization(body) # request url - if _host is None: + if _host is None or self.configuration.ignore_operation_servers: url = self.configuration.host + resource_path else: # use server/host defined in path or 
operation instead @@ -202,68 +238,109 @@ def __call_api( # query parameters if query_params: query_params = self.sanitize_for_serialization(query_params) - url_query = self.parameters_to_url_query(query_params, - collection_formats) + url_query = self.parameters_to_url_query( + query_params, + collection_formats + ) url += "?" + url_query + return method, url, header_params, body, post_params + + + def call_api( + self, + method, + url, + header_params=None, + body=None, + post_params=None, + _request_timeout=None + ) -> rest.RESTResponse: + """Makes the HTTP request (synchronous) + :param method: Method to call. + :param url: Path to method endpoint. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param _request_timeout: timeout setting for this request. + :return: RESTResponse + """ + try: # perform request and return response - response_data = self.request( + response_data = self.rest_client.request( method, url, - query_params=query_params, headers=header_params, - post_params=post_params, body=body, - _preload_content=_preload_content, - _request_timeout=_request_timeout) + body=body, post_params=post_params, + _request_timeout=_request_timeout + ) + except ApiException as e: - if e.body: - e.body = e.body.decode('utf-8') raise e - self.last_response = response_data - - return_data = None # assuming derialization is not needed - # data needs deserialization or returns HTTP data (deserialized) only - if _preload_content or _return_http_data_only: - response_type = response_types_map.get(str(response_data.status), None) - if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: - # if not found, look for '1XX', '2XX', etc. - response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) - - if response_type == "bytearray": - response_data.data = response_data.data - elif response_data.data is not None: - # Note: fixed handling of empty responses - match = None - content_type = response_data.getheader('content-type') - if content_type is not None: - match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) - encoding = match.group(1) if match else "utf-8" - response_data.data = response_data.data.decode(encoding) - - # deserialize response data - if response_type == "bytearray": - return_data = response_data.data - elif response_type: - return_data = self.deserialize(response_data, response_type) - else: - return_data = None - - if _return_http_data_only: - return return_data - else: - return ApiResponse(status_code = response_data.status, - data = return_data, - headers = response_data.getheaders(), - raw_data = response_data.data) + return response_data + + def response_deserialize( + self, + response_data: rest.RESTResponse, + response_types_map: Optional[Dict[str, ApiResponseT]]=None + ) -> ApiResponse[ApiResponseT]: + """Deserializes response into an object. + :param response_data: RESTResponse object to be deserialized. + :param response_types_map: dict of response types. 
+ :return: ApiResponse + """ + + msg = "RESTResponse.read() must be called before passing it to response_deserialize()" + assert response_data.data is not None, msg + + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. + response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) + + # deserialize response data + response_text = None + return_data = None + try: + if response_type == "bytearray": + return_data = response_data.data + elif response_type == "file": + return_data = self.__deserialize_file(response_data) + elif response_type is not None: + match = None + content_type = response_data.getheader('content-type') + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_text = response_data.data.decode(encoding) + return_data = self.deserialize(response_text, response_type, content_type) + finally: + if not 200 <= response_data.status <= 299: + raise ApiException.from_response( + http_resp=response_data, + body=response_text, + data=return_data, + ) + + return ApiResponse( + status_code = response_data.status, + data = return_data, + headers = response_data.getheaders(), + raw_data = response_data.data + ) def sanitize_for_serialization(self, obj): """Builds a JSON POST object. If obj is None, return None. + If obj is SecretStr, return obj.get_secret_value() If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date convert to string in iso8601 format. + If obj is decimal.Decimal return string representation. If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. @@ -273,18 +350,26 @@ def sanitize_for_serialization(self, obj): """ if obj is None: return None + elif isinstance(obj, Enum): + return obj.value + elif isinstance(obj, SecretStr): + return obj.get_secret_value() elif isinstance(obj, self.PRIMITIVE_TYPES): return obj elif isinstance(obj, list): - return [self.sanitize_for_serialization(sub_obj) - for sub_obj in obj] + return [ + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ] elif isinstance(obj, tuple): - return tuple(self.sanitize_for_serialization(sub_obj) - for sub_obj in obj) + return tuple( + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ) elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() + elif isinstance(obj, decimal.Decimal): + return str(obj) - if isinstance(obj, dict): + elif isinstance(obj, dict): obj_dict = obj else: # Convert model obj to dict except @@ -292,30 +377,45 @@ def sanitize_for_serialization(self, obj): # and attributes which value is not None. # Convert attribute name to json key in # model definition for request. - obj_dict = obj.to_dict() + if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')): + obj_dict = obj.to_dict() + else: + obj_dict = obj.__dict__ - return {key: self.sanitize_for_serialization(val) - for key, val in obj_dict.items()} + return { + key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items() + } - def deserialize(self, response, response_type): + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): """Deserializes response into an object. 
:param response: RESTResponse object to be deserialized. :param response_type: class literal for deserialized object, or string of class name. + :param content_type: content type of response. :return: deserialized object. """ - # handle file downloading - # save response body into a tmp file and return the instance - if response_type == "file": - return self.__deserialize_file(response) # fetch data from response object - try: - data = json.loads(response.data) - except ValueError: - data = response.data + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif re.match(r'^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)', content_type, re.IGNORECASE): + if response_text == "": + data = "" + else: + data = json.loads(response_text) + elif re.match(r'^text\/[a-z.+-]+\s*(;|$)', content_type, re.IGNORECASE): + data = response_text + else: + raise ApiException( + status=0, + reason="Unsupported content type: {0}".format(content_type) + ) return self.__deserialize(data, response_type) @@ -332,12 +432,16 @@ def __deserialize(self, data, klass): if isinstance(klass, str): if klass.startswith('List['): - sub_kls = re.match(r'List\[(.*)]', klass).group(1) + m = re.match(r'List\[(.*)]', klass) + assert m is not None, "Malformed List type definition" + sub_kls = m.group(1) return [self.__deserialize(sub_data, sub_kls) for sub_data in data] if klass.startswith('Dict['): - sub_kls = re.match(r'Dict\[([^,]*), (.*)]', klass).group(2) + m = re.match(r'Dict\[([^,]*), (.*)]', klass) + assert m is not None, "Malformed Dict type definition" + sub_kls = m.group(2) return {k: self.__deserialize(v, sub_kls) for k, v in data.items()} @@ -355,141 +459,13 @@ def __deserialize(self, data, klass): return self.__deserialize_date(data) elif klass == datetime.datetime: return self.__deserialize_datetime(data) + elif klass == decimal.Decimal: + return decimal.Decimal(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) else: return self.__deserialize_model(data, klass) - def call_api(self, resource_path, method, - path_params=None, query_params=None, header_params=None, - body=None, post_params=None, files=None, - response_types_map=None, auth_settings=None, - async_req=None, _return_http_data_only=None, - collection_formats=None, _preload_content=True, - _request_timeout=None, _host=None, _request_auth=None): - """Makes the HTTP request (synchronous) and returns deserialized data. - - To make an async_req request, set the async_req parameter. - - :param resource_path: Path to method endpoint. - :param method: Method to call. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param auth_settings list: Auth Settings names for the request. - :param response: Response data type. - :param files dict: key -> filename, value -> filepath, - for `multipart/form-data`. - :param async_req bool: execute request asynchronously - :param _return_http_data_only: response data instead of ApiResponse - object with status code, headers, etc - :param _preload_content: if False, the ApiResponse.data will - be set to none and raw_data will store the - HTTP response body without reading/decoding. - Default is True. 
- :param collection_formats: dict of collection formats for path, query, - header, and post parameters. - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. - :type _request_token: dict, optional - :return: - If async_req parameter is True, - the request will be called asynchronously. - The method will return the request thread. - If parameter async_req is False or missing, - then the method will return the response directly. - """ - # Note: remove query string in path part for ogc endpoints - resource_path = resource_path.partition("?")[0] - - if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_types_map, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout, _host, - _request_auth) - - return self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, - query_params, - header_params, body, - post_params, files, - response_types_map, - auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, - _request_timeout, - _host, _request_auth)) - - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): - """Makes the HTTP request using RESTClient.""" - if method == "GET": - return self.rest_client.get_request(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "HEAD": - return self.rest_client.head_request(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "OPTIONS": - return self.rest_client.options_request(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout) - elif method == "POST": - return self.rest_client.post_request(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PUT": - return self.rest_client.put_request(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PATCH": - return self.rest_client.patch_request(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "DELETE": - return self.rest_client.delete_request(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - else: - raise ApiValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." - ) - def parameters_to_tuples(self, params, collection_formats): """Get parameters as list of tuples, formatting collections. 
@@ -497,10 +473,10 @@ def parameters_to_tuples(self, params, collection_formats): :param dict collection_formats: Parameter collection formats :return: Parameters as list of tuples, collections formatted """ - new_params = [] + new_params: List[Tuple[str, str]] = [] if collection_formats is None: collection_formats = {} - for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + for k, v in params.items() if isinstance(params, dict) else params: if k in collection_formats: collection_format = collection_formats[k] if collection_format == 'multi': @@ -527,21 +503,21 @@ def parameters_to_url_query(self, params, collection_formats): :param dict collection_formats: Parameter collection formats :return: URL query string (e.g. a=Hello%20World&b=123) """ - new_params = [] + new_params: List[Tuple[str, str]] = [] if collection_formats is None: collection_formats = {} - for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 - if isinstance(v, (int, float)): - v = str(v) + for k, v in params.items() if isinstance(params, dict) else params: if isinstance(v, bool): v = str(v).lower() + if isinstance(v, (int, float)): + v = str(v) if isinstance(v, dict): v = json.dumps(v) if k in collection_formats: collection_format = collection_formats[k] if collection_format == 'multi': - new_params.extend((k, value) for value in v) + new_params.extend((k, quote(str(value))) for value in v) else: if collection_format == 'ssv': delimiter = ' ' @@ -552,44 +528,56 @@ def parameters_to_url_query(self, params, collection_formats): else: # csv is the default delimiter = ',' new_params.append( - (k, delimiter.join(quote(str(value)) for value in v))) + (k, delimiter.join(quote(str(value)) for value in v)) + ) else: new_params.append((k, quote(str(v)))) - return "&".join(["=".join(item) for item in new_params]) + return "&".join(["=".join(map(str, item)) for item in new_params]) - def files_parameters(self, files=None): + def files_parameters( + self, + files: Dict[str, Union[str, bytes, List[str], List[bytes], Tuple[str, bytes]]], + ): """Builds form parameters. :param files: File parameters. :return: Form parameters with files. """ params = [] - - if files: - for k, v in files.items(): - if not v: - continue - file_names = v if type(v) is list else [v] - for n in file_names: - with open(n, 'rb') as f: - filename = os.path.basename(f.name) - filedata = f.read() - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([k, tuple([filename, filedata, mimetype])])) - + for k, v in files.items(): + if isinstance(v, str): + with open(v, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + elif isinstance(v, bytes): + filename = k + filedata = v + elif isinstance(v, tuple): + filename, filedata = v + elif isinstance(v, list): + for file_param in v: + params.extend(self.files_parameters({k: file_param})) + continue + else: + raise ValueError("Unsupported file value") + mimetype = ( + mimetypes.guess_type(filename)[0] + or 'application/octet-stream' + ) + params.append( + tuple([k, tuple([filename, filedata, mimetype])]) + ) return params - def select_header_accept(self, accepts): + def select_header_accept(self, accepts: List[str]) -> Optional[str]: """Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). 
""" if not accepts: - return + return None for accept in accepts: if re.search('json', accept, re.IGNORECASE): @@ -612,9 +600,16 @@ def select_header_content_type(self, content_types): return content_types[0] - def update_params_for_auth(self, headers, queries, auth_settings, - resource_path, method, body, - request_auth=None): + def update_params_for_auth( + self, + headers, + queries, + auth_settings, + resource_path, + method, + body, + request_auth=None + ) -> None: """Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. @@ -631,21 +626,36 @@ def update_params_for_auth(self, headers, queries, auth_settings, return if request_auth: - self._apply_auth_params(headers, queries, - resource_path, method, body, - request_auth) - return - - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - self._apply_auth_params(headers, queries, - resource_path, method, body, - auth_setting) - - def _apply_auth_params(self, headers, queries, - resource_path, method, body, - auth_setting): + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + request_auth + ) + else: + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + auth_setting + ) + + def _apply_auth_params( + self, + headers, + queries, + resource_path, + method, + body, + auth_setting + ) -> None: """Updates the request parameters based on a single auth_setting :param headers: Header parameters dict to be updated. @@ -674,6 +684,9 @@ def __deserialize_file(self, response): Saves response body into a file in a temporary folder, using the filename from the `Content-Disposition` header if provided. + handle file downloading + save response body into a tmp file and return the instance + :param response: RESTResponse. :return: file path. """ @@ -683,8 +696,12 @@ def __deserialize_file(self, response): content_disposition = response.getheader("Content-Disposition") if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition).group(1) + m = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition + ) + assert m is not None, "Unexpected 'content-disposition' header value" + filename = m.group(1) path = os.path.join(os.path.dirname(path), filename) with open(path, "wb") as f: @@ -751,6 +768,24 @@ def __deserialize_datetime(self, string): ) ) + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. + + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as `{1}`" + .format(data, klass) + ) + ) + def __deserialize_model(self, data, klass): """Deserializes list or dict to model. 
diff --git a/python/geoengine_openapi_client/api_response.py b/python/geoengine_openapi_client/api_response.py index a0b62b95..9bc7c11f 100644 --- a/python/geoengine_openapi_client/api_response.py +++ b/python/geoengine_openapi_client/api_response.py @@ -1,25 +1,21 @@ """API response object.""" from __future__ import annotations -from typing import Any, Dict, Optional -from pydantic import Field, StrictInt, StrictStr +from typing import Optional, Generic, Mapping, TypeVar +from pydantic import Field, StrictInt, StrictBytes, BaseModel -class ApiResponse: +T = TypeVar("T") + +class ApiResponse(BaseModel, Generic[T]): """ API response object """ - status_code: Optional[StrictInt] = Field(None, description="HTTP status code") - headers: Optional[Dict[StrictStr, StrictStr]] = Field(None, description="HTTP headers") - data: Optional[Any] = Field(None, description="Deserialized data given the data type") - raw_data: Optional[Any] = Field(None, description="Raw data (HTTP response body)") + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") - def __init__(self, - status_code=None, - headers=None, - data=None, - raw_data=None) -> None: - self.status_code = status_code - self.headers = headers - self.data = data - self.raw_data = raw_data + model_config = { + "arbitrary_types_allowed": True + } diff --git a/python/geoengine_openapi_client/configuration.py b/python/geoengine_openapi_client/configuration.py index 82619840..e993f0dc 100644 --- a/python/geoengine_openapi_client/configuration.py +++ b/python/geoengine_openapi_client/configuration.py @@ -14,12 +14,16 @@ import copy +import http.client as httplib import logging +from logging import FileHandler import multiprocessing import sys +from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict +from typing_extensions import NotRequired, Self + import urllib3 -import http.client as httplib JSON_SCHEMA_VALIDATION_KEYWORDS = { 'multipleOf', 'maximum', 'exclusiveMaximum', @@ -27,10 +31,114 @@ 'minLength', 'pattern', 'maxItems', 'minItems' } +ServerVariablesT = Dict[str, str] + +GenericAuthSetting = TypedDict( + "GenericAuthSetting", + { + "type": str, + "in": str, + "key": str, + "value": str, + }, +) + + +OAuth2AuthSetting = TypedDict( + "OAuth2AuthSetting", + { + "type": Literal["oauth2"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +APIKeyAuthSetting = TypedDict( + "APIKeyAuthSetting", + { + "type": Literal["api_key"], + "in": str, + "key": str, + "value": Optional[str], + }, +) + + +BasicAuthSetting = TypedDict( + "BasicAuthSetting", + { + "type": Literal["basic"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": Optional[str], + }, +) + + +BearerFormatAuthSetting = TypedDict( + "BearerFormatAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "format": Literal["JWT"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +BearerAuthSetting = TypedDict( + "BearerAuthSetting", + { + "type": Literal["bearer"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": str, + }, +) + + +HTTPSignatureAuthSetting = TypedDict( + "HTTPSignatureAuthSetting", + { + "type": Literal["http-signature"], + "in": Literal["header"], + "key": Literal["Authorization"], + "value": None, + }, +) + + 
+AuthSettings = TypedDict( + "AuthSettings", + { + "session_token": BearerFormatAuthSetting, + }, + total=False, +) + + +class HostSettingVariable(TypedDict): + description: str + default_value: str + enum_values: List[str] + + +class HostSetting(TypedDict): + url: str + description: str + variables: NotRequired[Dict[str, HostSettingVariable]] + + class Configuration: """This class contains various settings of the API client. :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. + Config will use `host` as the base url regardless of the operation servers. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. @@ -53,23 +161,34 @@ class Configuration: values before. :param ssl_ca_cert: str - the path to a file of concatenated CA certificates in PEM format. + :param retries: Number of retries for API requests. :Example: """ - _default = None - - def __init__(self, host=None, - api_key=None, api_key_prefix=None, - username=None, password=None, - access_token=None, - server_index=None, server_variables=None, - server_operation_index=None, server_operation_variables=None, - ssl_ca_cert=None, - ) -> None: + _default: ClassVar[Optional[Self]] = None + + def __init__( + self, + host: Optional[str]=None, + api_key: Optional[Dict[str, str]]=None, + api_key_prefix: Optional[Dict[str, str]]=None, + username: Optional[str]=None, + password: Optional[str]=None, + access_token: Optional[str]=None, + server_index: Optional[int]=None, + server_variables: Optional[ServerVariablesT]=None, + server_operation_index: Optional[Dict[int, int]]=None, + server_operation_variables: Optional[Dict[int, ServerVariablesT]]=None, + ignore_operation_servers: bool=False, + ssl_ca_cert: Optional[str]=None, + retries: Optional[int] = None, + *, + debug: Optional[bool] = None, + ) -> None: """Constructor """ - self._base_path = "http://127.0.0.1:3030/api" if host is None else host + self._base_path = "https://geoengine.io/api" if host is None else host """Default Base url """ self.server_index = 0 if server_index is None and host is None else server_index @@ -80,6 +199,9 @@ def __init__(self, host=None, self.server_operation_variables = server_operation_variables or {} """Default server variables """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ self.temp_folder_path = None """Temp file folder for downloading files """ @@ -117,13 +239,16 @@ def __init__(self, host=None, self.logger_stream_handler = None """Log stream handler """ - self.logger_file_handler = None + self.logger_file_handler: Optional[FileHandler] = None """Log file handler """ self.logger_file = None """Debug file location """ - self.debug = False + if debug is not None: + self.debug = debug + else: + self.__debug = False """Debug switch """ @@ -157,7 +282,7 @@ def __init__(self, host=None, cpu_count * 5 is used as default value to increase performance. 
""" - self.proxy = None + self.proxy: Optional[str] = None """Proxy URL """ self.proxy_headers = None @@ -166,7 +291,7 @@ def __init__(self, host=None, self.safe_chars_for_path_param = '' """Safe chars for path_param """ - self.retries = None + self.retries = retries """Adding retries to override urllib3 default value 3 """ # Enable client side validation @@ -184,7 +309,7 @@ def __init__(self, host=None, """date format """ - def __deepcopy__(self, memo): + def __deepcopy__(self, memo: Dict[int, Any]) -> Self: cls = self.__class__ result = cls.__new__(cls) memo[id(self)] = result @@ -198,11 +323,11 @@ def __deepcopy__(self, memo): result.debug = self.debug return result - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Any) -> None: object.__setattr__(self, name, value) @classmethod - def set_default(cls, default): + def set_default(cls, default: Optional[Self]) -> None: """Set default instance of configuration. It stores default configuration, which can be @@ -213,7 +338,7 @@ def set_default(cls, default): cls._default = default @classmethod - def get_default_copy(cls): + def get_default_copy(cls) -> Self: """Deprecated. Please use `get_default` instead. Deprecated. Please use `get_default` instead. @@ -223,7 +348,7 @@ def get_default_copy(cls): return cls.get_default() @classmethod - def get_default(cls): + def get_default(cls) -> Self: """Return the default configuration. This method returns newly created, based on default constructor, @@ -233,11 +358,11 @@ def get_default(cls): :return: The configuration object. """ if cls._default is None: - cls._default = Configuration() + cls._default = cls() return cls._default @property - def logger_file(self): + def logger_file(self) -> Optional[str]: """The logger file. If the logger_file is None, then add stream handler and remove file @@ -249,7 +374,7 @@ def logger_file(self): return self.__logger_file @logger_file.setter - def logger_file(self, value): + def logger_file(self, value: Optional[str]) -> None: """The logger file. If the logger_file is None, then add stream handler and remove file @@ -268,7 +393,7 @@ def logger_file(self, value): logger.addHandler(self.logger_file_handler) @property - def debug(self): + def debug(self) -> bool: """Debug status :param value: The debug status, True or False. @@ -277,7 +402,7 @@ def debug(self): return self.__debug @debug.setter - def debug(self, value): + def debug(self, value: bool) -> None: """Debug status :param value: The debug status, True or False. @@ -299,7 +424,7 @@ def debug(self, value): httplib.HTTPConnection.debuglevel = 0 @property - def logger_format(self): + def logger_format(self) -> str: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -310,7 +435,7 @@ def logger_format(self): return self.__logger_format @logger_format.setter - def logger_format(self, value): + def logger_format(self, value: str) -> None: """The logger format. The logger_formatter will be updated when sets logger_format. @@ -321,7 +446,7 @@ def logger_format(self, value): self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format) - def get_api_key_with_prefix(self, identifier, alias=None): + def get_api_key_with_prefix(self, identifier: str, alias: Optional[str]=None) -> Optional[str]: """Gets API key (with prefix if set). :param identifier: The identifier of apiKey. 
@@ -338,7 +463,9 @@ def get_api_key_with_prefix(self, identifier, alias=None): else: return key - def get_basic_auth_token(self): + return None + + def get_basic_auth_token(self) -> Optional[str]: """Gets HTTP basic authentication header (string). :return: The token for basic HTTP authentication. @@ -353,12 +480,12 @@ def get_basic_auth_token(self): basic_auth=username + ':' + password ).get('authorization') - def auth_settings(self): + def auth_settings(self)-> AuthSettings: """Gets Auth Settings dict for api client. :return: The Auth Settings information dict. """ - auth = {} + auth: AuthSettings = {} if self.access_token is not None: auth['session_token'] = { 'type': 'bearer', @@ -369,7 +496,7 @@ def auth_settings(self): } return auth - def to_debug_report(self): + def to_debug_report(self) -> str: """Gets the essential information for debugging. :return: The report for debugging. @@ -381,19 +508,30 @@ def to_debug_report(self): "SDK Package Version: 0.0.19".\ format(env=sys.platform, pyversion=sys.version) - def get_host_settings(self): + def get_host_settings(self) -> List[HostSetting]: """Gets an array of host settings :return: An array of host settings """ return [ { - 'url': "http://127.0.0.1:3030/api", + 'url': "{server}/api", 'description': "No description provided", + 'variables': { + 'server': { + 'description': "No description provided", + 'default_value': "https://geoengine.io", + } + } } ] - def get_host_from_settings(self, index, variables=None, servers=None): + def get_host_from_settings( + self, + index: Optional[int], + variables: Optional[ServerVariablesT]=None, + servers: Optional[List[HostSetting]]=None, + ) -> str: """Gets host URL based on the index and variables :param index: array index of the host settings :param variables: hash of variable and the corresponding value @@ -433,12 +571,12 @@ def get_host_from_settings(self, index, variables=None, servers=None): return url @property - def host(self): + def host(self) -> str: """Return generated host.""" return self.get_host_from_settings(self.server_index, variables=self.server_variables) @host.setter - def host(self, value): + def host(self, value: str) -> None: """Fix base path.""" self._base_path = value self.server_index = None diff --git a/python/geoengine_openapi_client/exceptions.py b/python/geoengine_openapi_client/exceptions.py index 0bfb70c2..8d35d172 100644 --- a/python/geoengine_openapi_client/exceptions.py +++ b/python/geoengine_openapi_client/exceptions.py @@ -12,6 +12,8 @@ Do not edit the class manually. 
""" # noqa: E501 +from typing import Any, Optional +from typing_extensions import Self class OpenApiException(Exception): """The base exception class for all OpenAPIExceptions""" @@ -102,17 +104,63 @@ def __init__(self, msg, path_to_item=None) -> None: class ApiException(OpenApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode('utf-8') + except Exception: + pass self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, body=body, data=data) + + # Added new conditions for 409 and 422 + if http_resp.status == 409: + raise ConflictException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 422: + raise UnprocessableEntityException(http_resp=http_resp, body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + raise ApiException(http_resp=http_resp, body=body, data=data) def __str__(self): """Custom error messages for exception""" @@ -128,38 +176,40 @@ def __str__(self): error_message += "HTTP response headers: {0}\n".format( self.headers) - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) return error_message + class BadRequestException(ApiException): + pass - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(BadRequestException, self).__init__(status, reason, http_resp) class NotFoundException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(NotFoundException, self).__init__(status, reason, http_resp) + pass class UnauthorizedException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(UnauthorizedException, self).__init__(status, reason, http_resp) + pass class ForbiddenException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ForbiddenException, self).__init__(status, reason, http_resp) + pass class ServiceException(ApiException): + pass + + +class ConflictException(ApiException): + """Exception for HTTP 409 Conflict.""" + pass + - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ServiceException, self).__init__(status, reason, http_resp) +class UnprocessableEntityException(ApiException): + """Exception for HTTP 422 
Unprocessable Entity.""" + pass def render_path(path_to_item): diff --git a/python/geoengine_openapi_client/models/__init__.py b/python/geoengine_openapi_client/models/__init__.py index 1fd35d8b..18c128fe 100644 --- a/python/geoengine_openapi_client/models/__init__.py +++ b/python/geoengine_openapi_client/models/__init__.py @@ -15,7 +15,6 @@ # import models into model package -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response from geoengine_openapi_client.models.add_dataset import AddDataset from geoengine_openapi_client.models.add_layer import AddLayer from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection @@ -36,7 +35,6 @@ from geoengine_openapi_client.models.continuous_measurement import ContinuousMeasurement from geoengine_openapi_client.models.coordinate2_d import Coordinate2D from geoengine_openapi_client.models.create_dataset import CreateDataset -from geoengine_openapi_client.models.create_dataset_handler200_response import CreateDatasetHandler200Response from geoengine_openapi_client.models.create_project import CreateProject from geoengine_openapi_client.models.csv_header import CsvHeader from geoengine_openapi_client.models.data_id import DataId @@ -49,7 +47,6 @@ from geoengine_openapi_client.models.dataset_definition import DatasetDefinition from geoengine_openapi_client.models.dataset_listing import DatasetListing from geoengine_openapi_client.models.dataset_resource import DatasetResource -from geoengine_openapi_client.models.date_time import DateTime from geoengine_openapi_client.models.derived_color import DerivedColor from geoengine_openapi_client.models.derived_number import DerivedNumber from geoengine_openapi_client.models.describe_coverage_request import DescribeCoverageRequest @@ -79,6 +76,9 @@ from geoengine_openapi_client.models.get_map_exception_format import GetMapExceptionFormat from geoengine_openapi_client.models.get_map_format import GetMapFormat from geoengine_openapi_client.models.get_map_request import GetMapRequest +from geoengine_openapi_client.models.inline_object import InlineObject +from geoengine_openapi_client.models.inline_object1 import InlineObject1 +from geoengine_openapi_client.models.inline_object2 import InlineObject2 from geoengine_openapi_client.models.internal_data_id import InternalDataId from geoengine_openapi_client.models.layer import Layer from geoengine_openapi_client.models.layer_collection import LayerCollection @@ -86,7 +86,6 @@ from geoengine_openapi_client.models.layer_collection_resource import LayerCollectionResource from geoengine_openapi_client.models.layer_listing import LayerListing from geoengine_openapi_client.models.layer_resource import LayerResource -from geoengine_openapi_client.models.layer_update import LayerUpdate from geoengine_openapi_client.models.layer_visibility import LayerVisibility from geoengine_openapi_client.models.line_symbology import LineSymbology from geoengine_openapi_client.models.linear_gradient import LinearGradient @@ -99,13 +98,11 @@ from geoengine_openapi_client.models.ml_model_name_response import MlModelNameResponse from geoengine_openapi_client.models.ml_model_resource import MlModelResource from geoengine_openapi_client.models.mock_dataset_data_source_loading_info import MockDatasetDataSourceLoadingInfo -from geoengine_openapi_client.models.mock_meta_data import MockMetaData from geoengine_openapi_client.models.multi_band_raster_colorizer import MultiBandRasterColorizer from 
geoengine_openapi_client.models.multi_line_string import MultiLineString from geoengine_openapi_client.models.multi_point import MultiPoint from geoengine_openapi_client.models.multi_polygon import MultiPolygon from geoengine_openapi_client.models.number_param import NumberParam -from geoengine_openapi_client.models.ogr_meta_data import OgrMetaData from geoengine_openapi_client.models.ogr_source_column_spec import OgrSourceColumnSpec from geoengine_openapi_client.models.ogr_source_dataset import OgrSourceDataset from geoengine_openapi_client.models.ogr_source_dataset_time_type import OgrSourceDatasetTimeType @@ -119,9 +116,9 @@ from geoengine_openapi_client.models.ogr_source_duration_spec_zero import OgrSourceDurationSpecZero from geoengine_openapi_client.models.ogr_source_error_spec import OgrSourceErrorSpec from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat -from geoengine_openapi_client.models.ogr_source_time_format_auto import OgrSourceTimeFormatAuto -from geoengine_openapi_client.models.ogr_source_time_format_custom import OgrSourceTimeFormatCustom -from geoengine_openapi_client.models.ogr_source_time_format_unix_time_stamp import OgrSourceTimeFormatUnixTimeStamp +from geoengine_openapi_client.models.ogr_source_time_format_one_of import OgrSourceTimeFormatOneOf +from geoengine_openapi_client.models.ogr_source_time_format_one_of1 import OgrSourceTimeFormatOneOf1 +from geoengine_openapi_client.models.ogr_source_time_format_one_of2 import OgrSourceTimeFormatOneOf2 from geoengine_openapi_client.models.operator_quota import OperatorQuota from geoengine_openapi_client.models.order_by import OrderBy from geoengine_openapi_client.models.palette_colorizer import PaletteColorizer @@ -131,9 +128,7 @@ from geoengine_openapi_client.models.permission_request import PermissionRequest from geoengine_openapi_client.models.plot import Plot from geoengine_openapi_client.models.plot_output_format import PlotOutputFormat -from geoengine_openapi_client.models.plot_query_rectangle import PlotQueryRectangle from geoengine_openapi_client.models.plot_result_descriptor import PlotResultDescriptor -from geoengine_openapi_client.models.plot_update import PlotUpdate from geoengine_openapi_client.models.point_symbology import PointSymbology from geoengine_openapi_client.models.polygon_symbology import PolygonSymbology from geoengine_openapi_client.models.project import Project @@ -149,6 +144,7 @@ from geoengine_openapi_client.models.provider_capabilities import ProviderCapabilities from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId +from geoengine_openapi_client.models.query_rectangle import QueryRectangle from geoengine_openapi_client.models.quota import Quota from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor from geoengine_openapi_client.models.raster_colorizer import RasterColorizer @@ -157,7 +153,6 @@ from geoengine_openapi_client.models.raster_dataset_from_workflow_result import RasterDatasetFromWorkflowResult from geoengine_openapi_client.models.raster_properties_entry_type import RasterPropertiesEntryType from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey -from geoengine_openapi_client.models.raster_query_rectangle import RasterQueryRectangle from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor from 
geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType from geoengine_openapi_client.models.raster_symbology import RasterSymbology @@ -180,6 +175,7 @@ from geoengine_openapi_client.models.spatial_reference_authority import SpatialReferenceAuthority from geoengine_openapi_client.models.spatial_reference_specification import SpatialReferenceSpecification from geoengine_openapi_client.models.spatial_resolution import SpatialResolution +from geoengine_openapi_client.models.static_meta_data import StaticMetaData from geoengine_openapi_client.models.static_number_param import StaticNumberParam from geoengine_openapi_client.models.stroke_param import StrokeParam from geoengine_openapi_client.models.suggest_meta_data import SuggestMetaData @@ -224,9 +220,9 @@ from geoengine_openapi_client.models.user_info import UserInfo from geoengine_openapi_client.models.user_registration import UserRegistration from geoengine_openapi_client.models.user_session import UserSession +from geoengine_openapi_client.models.vec_update import VecUpdate from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo from geoengine_openapi_client.models.vector_data_type import VectorDataType -from geoengine_openapi_client.models.vector_query_rectangle import VectorQueryRectangle from geoengine_openapi_client.models.vector_result_descriptor import VectorResultDescriptor from geoengine_openapi_client.models.volume import Volume from geoengine_openapi_client.models.volume_file_layers_response import VolumeFileLayersResponse diff --git a/python/geoengine_openapi_client/models/add_dataset.py b/python/geoengine_openapi_client/models/add_dataset.py index 8bafb4d5..adfb407b 100644 --- a/python/geoengine_openapi_client/models/add_dataset.py +++ b/python/geoengine_openapi_client/models/add_dataset.py @@ -18,97 +18,108 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.provenance import Provenance from geoengine_openapi_client.models.symbology import Symbology +from typing import Optional, Set +from typing_extensions import Self class AddDataset(BaseModel): """ AddDataset - """ - description: StrictStr = Field(...) 
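# ---------------------------------------------------------------------------
# A minimal usage sketch for the reworked exception hierarchy in exceptions.py
# above; it is not part of the generated patch. `do_request` stands in for any
# generated API call and is purely illustrative; only the exception names and
# attributes (status, reason, body, data) are taken from the patch.
from geoengine_openapi_client.exceptions import (
    ApiException,
    ConflictException,
    UnprocessableEntityException,
)

def call_with_error_handling(do_request):
    try:
        return do_request()
    except ConflictException as e:              # new: raised for HTTP 409
        print("conflict:", e.body)
    except UnprocessableEntityException as e:   # new: raised for HTTP 422
        print("unprocessable entity:", e.data or e.body)
    except ApiException as e:
        # ApiException.from_response raises the most specific subclass, so
        # this branch only sees status codes without a dedicated class.
        print("request failed:", e.status, e.reason)
# ---------------------------------------------------------------------------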
- display_name: StrictStr = Field(..., alias="displayName") + """ # noqa: E501 + description: StrictStr + display_name: StrictStr = Field(alias="displayName") name: Optional[StrictStr] = None - provenance: Optional[conlist(Provenance)] = None - source_operator: StrictStr = Field(..., alias="sourceOperator") + provenance: Optional[List[Provenance]] = None + source_operator: StrictStr = Field(alias="sourceOperator") symbology: Optional[Symbology] = None - tags: Optional[conlist(StrictStr)] = None - __properties = ["description", "displayName", "name", "provenance", "sourceOperator", "symbology", "tags"] + tags: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["description", "displayName", "name", "provenance", "sourceOperator", "symbology", "tags"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AddDataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AddDataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in provenance (list) _items = [] if self.provenance: - for _item in self.provenance: - if _item: - _items.append(_item.to_dict()) + for _item_provenance in self.provenance: + if _item_provenance: + _items.append(_item_provenance.to_dict()) _dict['provenance'] = _items # override the default output from pydantic by calling `to_dict()` of symbology if self.symbology: _dict['symbology'] = self.symbology.to_dict() - # set to None if name (nullable) is None - # and __fields_set__ contains the field - if self.name is None and "name" in self.__fields_set__: - _dict['name'] = None - # set to None if provenance (nullable) is None - # and __fields_set__ contains the field - if self.provenance is None and "provenance" in self.__fields_set__: + # and model_fields_set contains the field + if self.provenance is None and "provenance" in self.model_fields_set: _dict['provenance'] = None # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None # set to None if tags (nullable) is None - # and __fields_set__ contains the field - if self.tags is None and "tags" in self.__fields_set__: + # and model_fields_set contains the field + if self.tags is None and "tags" in self.model_fields_set: _dict['tags'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> AddDataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AddDataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AddDataset.parse_obj(obj) + return cls.model_validate(obj) - _obj = AddDataset.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "display_name": obj.get("displayName"), + "displayName": obj.get("displayName"), "name": obj.get("name"), - "provenance": [Provenance.from_dict(_item) for _item in obj.get("provenance")] if obj.get("provenance") is not None else None, - "source_operator": obj.get("sourceOperator"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, + "provenance": [Provenance.from_dict(_item) for _item in obj["provenance"]] if obj.get("provenance") is not None else None, + "sourceOperator": obj.get("sourceOperator"), + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, "tags": obj.get("tags") }) return _obj diff --git a/python/geoengine_openapi_client/models/add_layer.py b/python/geoengine_openapi_client/models/add_layer.py index 8a2d48fc..77f4f519 100644 --- a/python/geoengine_openapi_client/models/add_layer.py +++ b/python/geoengine_openapi_client/models/add_layer.py @@ -18,48 +18,65 @@ import re # noqa: F401 import json - -from typing import Dict, List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.symbology import Symbology from geoengine_openapi_client.models.workflow import Workflow +from typing import Optional, Set +from 
typing_extensions import Self class AddLayer(BaseModel): """ AddLayer - """ - description: StrictStr = Field(...) - metadata: Optional[Dict[str, StrictStr]] = Field(None, description="metadata used for loading the data") - name: StrictStr = Field(...) - properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = Field(None, description="properties, for instance, to be rendered in the UI") + """ # noqa: E501 + description: StrictStr + metadata: Optional[Dict[str, StrictStr]] = Field(default=None, description="metadata used for loading the data") + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = Field(default=None, description="properties, for instance, to be rendered in the UI") symbology: Optional[Symbology] = None - workflow: Workflow = Field(...) - __properties = ["description", "metadata", "name", "properties", "symbology", "workflow"] + workflow: Workflow + __properties: ClassVar[List[str]] = ["description", "metadata", "name", "properties", "symbology", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AddLayer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AddLayer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of symbology if self.symbology: _dict['symbology'] = self.symbology.to_dict() @@ -67,28 +84,28 @@ def to_dict(self): if self.workflow: _dict['workflow'] = self.workflow.to_dict() # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> AddLayer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AddLayer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AddLayer.parse_obj(obj) + return cls.model_validate(obj) - _obj = AddLayer.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "metadata": obj.get("metadata"), "name": obj.get("name"), "properties": obj.get("properties"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, - "workflow": Workflow.from_dict(obj.get("workflow")) if obj.get("workflow") is not None else None + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, + "workflow": Workflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/add_layer_collection.py b/python/geoengine_openapi_client/models/add_layer_collection.py index 0d8009c8..51901c28 100644 --- a/python/geoengine_openapi_client/models/add_layer_collection.py +++ b/python/geoengine_openapi_client/models/add_layer_collection.py @@ -18,55 +18,72 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class AddLayerCollection(BaseModel): """ AddLayerCollection - """ - description: StrictStr = Field(...) - name: StrictStr = Field(...) 
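# ---------------------------------------------------------------------------
# A sketch of the pydantic-v2 round trip the migrated models above now expose;
# not part of the generated patch. Field values are illustrative; the method
# names (to_dict, to_json, from_json) and populate_by_name behaviour come from
# the patch itself.
from geoengine_openapi_client.models.add_dataset import AddDataset

dataset = AddDataset(
    description="Elevation tiles",
    display_name="Elevation",        # python name accepted: populate_by_name=True
    source_operator="GdalSource",
)

as_dict = dataset.to_dict()
assert as_dict["displayName"] == "Elevation"     # output always uses the alias

# from_json / from_dict expect alias keys ("displayName", "sourceOperator")
restored = AddDataset.from_json(dataset.to_json())
assert restored == dataset
# ---------------------------------------------------------------------------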
- properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = None - __properties = ["description", "name", "properties"] + """ # noqa: E501 + description: StrictStr + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = None + __properties: ClassVar[List[str]] = ["description", "name", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AddLayerCollection: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AddLayerCollection from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> AddLayerCollection: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AddLayerCollection from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AddLayerCollection.parse_obj(obj) + return cls.model_validate(obj) - _obj = AddLayerCollection.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "name": obj.get("name"), "properties": obj.get("properties") diff --git a/python/geoengine_openapi_client/models/add_role.py b/python/geoengine_openapi_client/models/add_role.py index eab22528..a87f302a 100644 --- a/python/geoengine_openapi_client/models/add_role.py +++ b/python/geoengine_openapi_client/models/add_role.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class AddRole(BaseModel): """ AddRole - """ - name: StrictStr = Field(...) 
- __properties = ["name"] + """ # noqa: E501 + name: StrictStr + __properties: ClassVar[List[str]] = ["name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AddRole: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AddRole from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> AddRole: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AddRole from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AddRole.parse_obj(obj) + return cls.model_validate(obj) - _obj = AddRole.parse_obj({ + _obj = cls.model_validate({ "name": obj.get("name") }) return _obj diff --git a/python/geoengine_openapi_client/models/auth_code_request_url.py b/python/geoengine_openapi_client/models/auth_code_request_url.py index 0e93ad71..169271ba 100644 --- a/python/geoengine_openapi_client/models/auth_code_request_url.py +++ b/python/geoengine_openapi_client/models/auth_code_request_url.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class AuthCodeRequestURL(BaseModel): """ AuthCodeRequestURL - """ - url: StrictStr = Field(...) 
- __properties = ["url"] + """ # noqa: E501 + url: StrictStr + __properties: ClassVar[List[str]] = ["url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AuthCodeRequestURL: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AuthCodeRequestURL from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> AuthCodeRequestURL: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AuthCodeRequestURL from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AuthCodeRequestURL.parse_obj(obj) + return cls.model_validate(obj) - _obj = AuthCodeRequestURL.parse_obj({ + _obj = cls.model_validate({ "url": obj.get("url") }) return _obj diff --git a/python/geoengine_openapi_client/models/auth_code_response.py b/python/geoengine_openapi_client/models/auth_code_response.py index 3834ea0e..b71cd352 100644 --- a/python/geoengine_openapi_client/models/auth_code_response.py +++ b/python/geoengine_openapi_client/models/auth_code_response.py @@ -18,57 +18,73 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class AuthCodeResponse(BaseModel): """ AuthCodeResponse - """ - code: StrictStr = Field(...) - session_state: StrictStr = Field(..., alias="sessionState") - state: StrictStr = Field(...) 
- __properties = ["code", "sessionState", "state"] + """ # noqa: E501 + code: StrictStr + session_state: StrictStr = Field(alias="sessionState") + state: StrictStr + __properties: ClassVar[List[str]] = ["code", "sessionState", "state"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AuthCodeResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AuthCodeResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> AuthCodeResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AuthCodeResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AuthCodeResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = AuthCodeResponse.parse_obj({ + _obj = cls.model_validate({ "code": obj.get("code"), - "session_state": obj.get("sessionState"), + "sessionState": obj.get("sessionState"), "state": obj.get("state") }) return _obj diff --git a/python/geoengine_openapi_client/models/auto_create_dataset.py b/python/geoengine_openapi_client/models/auto_create_dataset.py index 565f698a..953b9737 100644 --- a/python/geoengine_openapi_client/models/auto_create_dataset.py +++ b/python/geoengine_openapi_client/models/auto_create_dataset.py @@ -18,72 +18,88 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class AutoCreateDataset(BaseModel): """ AutoCreateDataset - """ - dataset_description: StrictStr = Field(..., alias="datasetDescription") - dataset_name: StrictStr = Field(..., alias="datasetName") - layer_name: Optional[StrictStr] = Field(None, alias="layerName") - main_file: StrictStr = Field(..., alias="mainFile") - tags: Optional[conlist(StrictStr)] = None - upload: StrictStr = Field(...) 
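# ---------------------------------------------------------------------------
# A sketch of the unset-vs-None behaviour that the hand-written to_dict()
# methods above implement via model_fields_set; not part of the generated
# patch. Values are illustrative; the behaviour mirrors the
# "set to None if ... (nullable) is None" branches in the generated code.
from geoengine_openapi_client.models.add_dataset import AddDataset

common = dict(description="d", display_name="n", source_operator="GdalSource")

implicit = AddDataset(**common)               # tags never set
explicit = AddDataset(**common, tags=None)    # tags explicitly set to None

assert "tags" not in implicit.to_dict()       # unset -> omitted from the output
assert explicit.to_dict()["tags"] is None     # set to None -> serialized as null
# ---------------------------------------------------------------------------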
- __properties = ["datasetDescription", "datasetName", "layerName", "mainFile", "tags", "upload"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + dataset_description: StrictStr = Field(alias="datasetDescription") + dataset_name: StrictStr = Field(alias="datasetName") + layer_name: Optional[StrictStr] = Field(default=None, alias="layerName") + main_file: StrictStr = Field(alias="mainFile") + tags: Optional[List[StrictStr]] = None + upload: StrictStr + __properties: ClassVar[List[str]] = ["datasetDescription", "datasetName", "layerName", "mainFile", "tags", "upload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> AutoCreateDataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of AutoCreateDataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if layer_name (nullable) is None - # and __fields_set__ contains the field - if self.layer_name is None and "layer_name" in self.__fields_set__: + # and model_fields_set contains the field + if self.layer_name is None and "layer_name" in self.model_fields_set: _dict['layerName'] = None # set to None if tags (nullable) is None - # and __fields_set__ contains the field - if self.tags is None and "tags" in self.__fields_set__: + # and model_fields_set contains the field + if self.tags is None and "tags" in self.model_fields_set: _dict['tags'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> AutoCreateDataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of AutoCreateDataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return AutoCreateDataset.parse_obj(obj) + return cls.model_validate(obj) - _obj = AutoCreateDataset.parse_obj({ - "dataset_description": obj.get("datasetDescription"), - "dataset_name": obj.get("datasetName"), - "layer_name": obj.get("layerName"), - "main_file": obj.get("mainFile"), + _obj = cls.model_validate({ + "datasetDescription": obj.get("datasetDescription"), + "datasetName": obj.get("datasetName"), + "layerName": obj.get("layerName"), + "mainFile": obj.get("mainFile"), "tags": obj.get("tags"), "upload": obj.get("upload") }) diff --git a/python/geoengine_openapi_client/models/axis_order.py b/python/geoengine_openapi_client/models/axis_order.py index b7ca6deb..15ec675e 100644 --- a/python/geoengine_openapi_client/models/axis_order.py +++ b/python/geoengine_openapi_client/models/axis_order.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class AxisOrder(str, Enum): @@ -34,8 +31,8 @@ class AxisOrder(str, Enum): EASTNORTH = 'eastNorth' @classmethod - def from_json(cls, json_str: str) -> AxisOrder: + def from_json(cls, json_str: str) -> Self: """Create an instance of AxisOrder from a JSON string""" - return AxisOrder(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/bounding_box2_d.py b/python/geoengine_openapi_client/models/bounding_box2_d.py index 281c51b9..542d384e 100644 --- a/python/geoengine_openapi_client/models/bounding_box2_d.py +++ b/python/geoengine_openapi_client/models/bounding_box2_d.py @@ -18,43 +18,59 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class BoundingBox2D(BaseModel): """ - A bounding box that includes all border points. Note: may degenerate to a point! # noqa: E501 - """ - lower_left_coordinate: Coordinate2D = Field(..., alias="lowerLeftCoordinate") - upper_right_coordinate: Coordinate2D = Field(..., alias="upperRightCoordinate") - __properties = ["lowerLeftCoordinate", "upperRightCoordinate"] + A bounding box that includes all border points. Note: may degenerate to a point! 
+ """ # noqa: E501 + lower_left_coordinate: Coordinate2D = Field(alias="lowerLeftCoordinate") + upper_right_coordinate: Coordinate2D = Field(alias="upperRightCoordinate") + __properties: ClassVar[List[str]] = ["lowerLeftCoordinate", "upperRightCoordinate"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> BoundingBox2D: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of BoundingBox2D from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of lower_left_coordinate if self.lower_left_coordinate: _dict['lowerLeftCoordinate'] = self.lower_left_coordinate.to_dict() @@ -64,17 +80,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> BoundingBox2D: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of BoundingBox2D from a dict""" if obj is None: return None if not isinstance(obj, dict): - return BoundingBox2D.parse_obj(obj) + return cls.model_validate(obj) - _obj = BoundingBox2D.parse_obj({ - "lower_left_coordinate": Coordinate2D.from_dict(obj.get("lowerLeftCoordinate")) if obj.get("lowerLeftCoordinate") is not None else None, - "upper_right_coordinate": Coordinate2D.from_dict(obj.get("upperRightCoordinate")) if obj.get("upperRightCoordinate") is not None else None + _obj = cls.model_validate({ + "lowerLeftCoordinate": Coordinate2D.from_dict(obj["lowerLeftCoordinate"]) if obj.get("lowerLeftCoordinate") is not None else None, + "upperRightCoordinate": Coordinate2D.from_dict(obj["upperRightCoordinate"]) if obj.get("upperRightCoordinate") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/breakpoint.py b/python/geoengine_openapi_client/models/breakpoint.py index da4df929..2e8874fd 100644 --- a/python/geoengine_openapi_client/models/breakpoint.py +++ b/python/geoengine_openapi_client/models/breakpoint.py @@ -18,54 +18,71 @@ import re # noqa: F401 import json - -from typing import List, Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Union +from 
typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class Breakpoint(BaseModel): """ Breakpoint - """ - color: conlist(StrictInt, max_items=4, min_items=4) = Field(...) - value: Union[StrictFloat, StrictInt] = Field(...) - __properties = ["color", "value"] + """ # noqa: E501 + color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] + value: Union[StrictFloat, StrictInt] + __properties: ClassVar[List[str]] = ["color", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Breakpoint: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Breakpoint from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Breakpoint: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Breakpoint from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Breakpoint.parse_obj(obj) + return cls.model_validate(obj) - _obj = Breakpoint.parse_obj({ + _obj = cls.model_validate({ "color": obj.get("color"), "value": obj.get("value") }) diff --git a/python/geoengine_openapi_client/models/classification_measurement.py b/python/geoengine_openapi_client/models/classification_measurement.py index 1fdf5188..1a685074 100644 --- a/python/geoengine_openapi_client/models/classification_measurement.py +++ b/python/geoengine_openapi_client/models/classification_measurement.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - -from typing import Dict -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ClassificationMeasurement(BaseModel): """ ClassificationMeasurement - """ - classes: Dict[str, StrictStr] = Field(...) - measurement: StrictStr = Field(...) - type: StrictStr = Field(...) 
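# ---------------------------------------------------------------------------
# A sketch of the conlist -> Annotated[List[...], Field(min_length=...,
# max_length=...)] migration shown above; not part of the generated patch.
# The RGBA values are illustrative; the length constraint on Breakpoint.color
# comes from the patch.
from pydantic import ValidationError
from geoengine_openapi_client.models.breakpoint import Breakpoint

ok = Breakpoint(color=[0, 0, 0, 255], value=0.0)   # exactly four channel values

try:
    Breakpoint(color=[0, 0, 0], value=0.0)         # too short, still rejected
except ValidationError as exc:
    print(exc.errors()[0]["type"])                 # e.g. "too_short" in pydantic v2
# ---------------------------------------------------------------------------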
- __properties = ["classes", "measurement", "type"] + """ # noqa: E501 + classes: Dict[str, StrictStr] + measurement: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["classes", "measurement", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('classification'): + if value not in set(['classification']): raise ValueError("must be one of enum values ('classification')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ClassificationMeasurement: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ClassificationMeasurement from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ClassificationMeasurement: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ClassificationMeasurement from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ClassificationMeasurement.parse_obj(obj) + return cls.model_validate(obj) - _obj = ClassificationMeasurement.parse_obj({ + _obj = cls.model_validate({ "classes": obj.get("classes"), "measurement": obj.get("measurement"), "type": obj.get("type") diff --git a/python/geoengine_openapi_client/models/collection_item.py b/python/geoengine_openapi_client/models/collection_item.py index 00c2c6f8..7a4b4c1a 100644 --- a/python/geoengine_openapi_client/models/collection_item.py +++ b/python/geoengine_openapi_client/models/collection_item.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.layer_collection_listing import LayerCollectionListing from geoengine_openapi_client.models.layer_listing import LayerListing -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self COLLECTIONITEM_ONE_OF_SCHEMAS = ["LayerCollectionListing", "LayerListing"] @@ -36,16 +34,16 @@ class CollectionItem(BaseModel): oneof_schema_1_validator: Optional[LayerCollectionListing] = None # data type: LayerListing oneof_schema_2_validator: Optional[LayerListing] = None - if TYPE_CHECKING: - actual_instance: Union[LayerCollectionListing, LayerListing] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(COLLECTIONITEM_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[LayerCollectionListing, LayerListing]] = None + one_of_schemas: Set[str] = { "LayerCollectionListing", "LayerListing" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -58,9 +56,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = CollectionItem.construct() + instance = CollectionItem.model_construct() error_messages = [] match = 0 # validate data type: LayerCollectionListing @@ -83,13 +81,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> CollectionItem: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> CollectionItem: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = CollectionItem.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -99,22 +97,22 @@ def from_json(cls, json_str: str) -> CollectionItem: raise 
ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `LayerCollectionListing` - if _data_type == "LayerCollectionListing": + if _data_type == "collection": instance.actual_instance = LayerCollectionListing.from_json(json_str) return instance # check if data type is `LayerListing` - if _data_type == "LayerListing": + if _data_type == "layer": instance.actual_instance = LayerListing.from_json(json_str) return instance # check if data type is `LayerCollectionListing` - if _data_type == "collection": + if _data_type == "LayerCollectionListing": instance.actual_instance = LayerCollectionListing.from_json(json_str) return instance # check if data type is `LayerListing` - if _data_type == "layer": + if _data_type == "LayerListing": instance.actual_instance = LayerListing.from_json(json_str) return instance @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], LayerCollectionListing, LayerListing]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/collection_type.py b/python/geoengine_openapi_client/models/collection_type.py index 5e3f580b..602c8b8b 100644 --- a/python/geoengine_openapi_client/models/collection_type.py +++ b/python/geoengine_openapi_client/models/collection_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class CollectionType(str, Enum): @@ -33,8 +30,8 @@ class CollectionType(str, Enum): FEATURECOLLECTION = 'FeatureCollection' @classmethod - def from_json(cls, json_str: str) -> CollectionType: + def from_json(cls, json_str: str) -> Self: """Create an instance of CollectionType from a JSON string""" - return CollectionType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/color_param.py b/python/geoengine_openapi_client/models/color_param.py index e7277bc5..c7747c01 100644 --- a/python/geoengine_openapi_client/models/color_param.py +++ b/python/geoengine_openapi_client/models/color_param.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.color_param_static import ColorParamStatic from geoengine_openapi_client.models.derived_color import 
DerivedColor -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self COLORPARAM_ONE_OF_SCHEMAS = ["ColorParamStatic", "DerivedColor"] @@ -36,16 +34,16 @@ class ColorParam(BaseModel): oneof_schema_1_validator: Optional[ColorParamStatic] = None # data type: DerivedColor oneof_schema_2_validator: Optional[DerivedColor] = None - if TYPE_CHECKING: - actual_instance: Union[ColorParamStatic, DerivedColor] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(COLORPARAM_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[ColorParamStatic, DerivedColor]] = None + one_of_schemas: Set[str] = { "ColorParamStatic", "DerivedColor" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -58,9 +56,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = ColorParam.construct() + instance = ColorParam.model_construct() error_messages = [] match = 0 # validate data type: ColorParamStatic @@ -83,13 +81,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> ColorParam: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> ColorParam: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = ColorParam.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -98,16 +96,6 @@ def from_json(cls, json_str: str) -> ColorParam: if not _data_type: raise ValueError("Failed to lookup data type from the field `type` in the input.") - # check if data type is `ColorParamStatic` - if _data_type == "ColorParamStatic": - instance.actual_instance = ColorParamStatic.from_json(json_str) - return instance - - # check if data type is `DerivedColor` - if _data_type == "DerivedColor": - instance.actual_instance = DerivedColor.from_json(json_str) - return instance - # check if data type is `DerivedColor` if _data_type == "derived": instance.actual_instance = DerivedColor.from_json(json_str) @@ -118,6 +106,16 @@ def from_json(cls, json_str: str) -> ColorParam: instance.actual_instance = ColorParamStatic.from_json(json_str) return instance + # check if data type is `ColorParamStatic` + if _data_type == "ColorParamStatic": + instance.actual_instance = ColorParamStatic.from_json(json_str) + return instance + + # check if data type is `DerivedColor` + if _data_type == "DerivedColor": + instance.actual_instance = DerivedColor.from_json(json_str) + return instance + # deserialize data into ColorParamStatic try: instance.actual_instance = ColorParamStatic.from_json(json_str) @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> 
Optional[Union[Dict[str, Any], ColorParamStatic, DerivedColor]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/color_param_static.py b/python/geoengine_openapi_client/models/color_param_static.py index 810a351a..a5cd7de6 100644 --- a/python/geoengine_openapi_client/models/color_param_static.py +++ b/python/geoengine_openapi_client/models/color_param_static.py @@ -18,61 +18,78 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictInt, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class ColorParamStatic(BaseModel): """ ColorParamStatic - """ - color: conlist(StrictInt, max_items=4, min_items=4) = Field(...) - type: StrictStr = Field(...) - __properties = ["color", "type"] + """ # noqa: E501 + color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] + type: StrictStr + __properties: ClassVar[List[str]] = ["color", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('static', 'derived'): - raise ValueError("must be one of enum values ('static', 'derived')") + if value not in set(['static']): + raise ValueError("must be one of enum values ('static')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ColorParamStatic: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ColorParamStatic from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ColorParamStatic: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ColorParamStatic from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ColorParamStatic.parse_obj(obj) + return cls.model_validate(obj) - _obj = ColorParamStatic.parse_obj({ + _obj = cls.model_validate({ "color": obj.get("color"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/colorizer.py b/python/geoengine_openapi_client/models/colorizer.py index 17612998..cd242dba 100644 --- a/python/geoengine_openapi_client/models/colorizer.py +++ b/python/geoengine_openapi_client/models/colorizer.py @@ -14,18 +14,16 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.linear_gradient import LinearGradient from geoengine_openapi_client.models.logarithmic_gradient import LogarithmicGradient from geoengine_openapi_client.models.palette_colorizer import PaletteColorizer -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self COLORIZER_ONE_OF_SCHEMAS = ["LinearGradient", "LogarithmicGradient", "PaletteColorizer"] @@ -39,16 +37,16 @@ class Colorizer(BaseModel): oneof_schema_2_validator: Optional[LogarithmicGradient] = None # data type: PaletteColorizer oneof_schema_3_validator: Optional[PaletteColorizer] = None - if TYPE_CHECKING: - actual_instance: Union[LinearGradient, LogarithmicGradient, PaletteColorizer] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(COLORIZER_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[LinearGradient, LogarithmicGradient, PaletteColorizer]] = None + one_of_schemas: Set[str] = { "LinearGradient", "LogarithmicGradient", "PaletteColorizer" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -61,9 +59,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = Colorizer.construct() + instance = Colorizer.model_construct() error_messages = [] match = 0 # validate data type: LinearGradient @@ -91,13 +89,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> Colorizer: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> Colorizer: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = Colorizer.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -107,32 +105,32 @@ def from_json(cls, json_str: str) -> Colorizer: raise 
ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `LinearGradient` - if _data_type == "LinearGradient": + if _data_type == "linearGradient": instance.actual_instance = LinearGradient.from_json(json_str) return instance # check if data type is `LogarithmicGradient` - if _data_type == "LogarithmicGradient": + if _data_type == "logarithmicGradient": instance.actual_instance = LogarithmicGradient.from_json(json_str) return instance # check if data type is `PaletteColorizer` - if _data_type == "PaletteColorizer": + if _data_type == "palette": instance.actual_instance = PaletteColorizer.from_json(json_str) return instance # check if data type is `LinearGradient` - if _data_type == "linearGradient": + if _data_type == "LinearGradient": instance.actual_instance = LinearGradient.from_json(json_str) return instance # check if data type is `LogarithmicGradient` - if _data_type == "logarithmicGradient": + if _data_type == "LogarithmicGradient": instance.actual_instance = LogarithmicGradient.from_json(json_str) return instance # check if data type is `PaletteColorizer` - if _data_type == "palette": + if _data_type == "PaletteColorizer": instance.actual_instance = PaletteColorizer.from_json(json_str) return instance @@ -169,19 +167,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], LinearGradient, LogarithmicGradient, PaletteColorizer]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -189,6 +185,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/computation_quota.py b/python/geoengine_openapi_client/models/computation_quota.py index faddf435..879e28a3 100644 --- a/python/geoengine_openapi_client/models/computation_quota.py +++ b/python/geoengine_openapi_client/models/computation_quota.py @@ -19,59 +19,77 @@ import json from datetime import datetime - -from pydantic import BaseModel, Field, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class ComputationQuota(BaseModel): """ ComputationQuota - """ - computation_id: StrictStr = Field(..., alias="computationId") - count: conint(strict=True, ge=0) = Field(...) - timestamp: datetime = Field(...) 
- workflow_id: StrictStr = Field(..., alias="workflowId") - __properties = ["computationId", "count", "timestamp", "workflowId"] + """ # noqa: E501 + computation_id: StrictStr = Field(alias="computationId") + count: Annotated[int, Field(strict=True, ge=0)] + timestamp: datetime + workflow_id: StrictStr = Field(alias="workflowId") + __properties: ClassVar[List[str]] = ["computationId", "count", "timestamp", "workflowId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ComputationQuota: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ComputationQuota from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ComputationQuota: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ComputationQuota from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ComputationQuota.parse_obj(obj) + return cls.model_validate(obj) - _obj = ComputationQuota.parse_obj({ - "computation_id": obj.get("computationId"), + _obj = cls.model_validate({ + "computationId": obj.get("computationId"), "count": obj.get("count"), "timestamp": obj.get("timestamp"), - "workflow_id": obj.get("workflowId") + "workflowId": obj.get("workflowId") }) return _obj diff --git a/python/geoengine_openapi_client/models/continuous_measurement.py b/python/geoengine_openapi_client/models/continuous_measurement.py index c3182066..7e931a0d 100644 --- a/python/geoengine_openapi_client/models/continuous_measurement.py +++ b/python/geoengine_openapi_client/models/continuous_measurement.py @@ -18,67 +18,83 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class ContinuousMeasurement(BaseModel): """ ContinuousMeasurement - """ - measurement: StrictStr = Field(...) - type: StrictStr = Field(...) 
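# A minimal usage sketch for the migrated model (all values below are illustrative
# placeholders): `from_dict` now routes through `model_validate`, so the camelCase
# aliases listed in `__properties` are accepted directly and ISO-8601 timestamp
# strings are parsed into `datetime` by pydantic v2.
from geoengine_openapi_client.models.computation_quota import ComputationQuota

quota = ComputationQuota.from_dict({
    "computationId": "00000000-0000-0000-0000-000000000000",
    "count": 3,
    "timestamp": "2025-01-01T00:00:00Z",
    "workflowId": "00000000-0000-0000-0000-000000000001",
})
assert quota.count == 3 and quota.workflow_id is not None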
+ """ # noqa: E501 + measurement: StrictStr + type: StrictStr unit: Optional[StrictStr] = None - __properties = ["measurement", "type", "unit"] + __properties: ClassVar[List[str]] = ["measurement", "type", "unit"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('continuous'): + if value not in set(['continuous']): raise ValueError("must be one of enum values ('continuous')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ContinuousMeasurement: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ContinuousMeasurement from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if unit (nullable) is None - # and __fields_set__ contains the field - if self.unit is None and "unit" in self.__fields_set__: + # and model_fields_set contains the field + if self.unit is None and "unit" in self.model_fields_set: _dict['unit'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> ContinuousMeasurement: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ContinuousMeasurement from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ContinuousMeasurement.parse_obj(obj) + return cls.model_validate(obj) - _obj = ContinuousMeasurement.parse_obj({ + _obj = cls.model_validate({ "measurement": obj.get("measurement"), "type": obj.get("type"), "unit": obj.get("unit") diff --git a/python/geoengine_openapi_client/models/coordinate2_d.py b/python/geoengine_openapi_client/models/coordinate2_d.py index 0a425ac8..5c5eed3f 100644 --- a/python/geoengine_openapi_client/models/coordinate2_d.py +++ b/python/geoengine_openapi_client/models/coordinate2_d.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - -from typing import Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Union +from typing import Optional, Set +from typing_extensions import Self class Coordinate2D(BaseModel): """ Coordinate2D - """ - x: Union[StrictFloat, StrictInt] = Field(...) 
- y: Union[StrictFloat, StrictInt] = Field(...) - __properties = ["x", "y"] + """ # noqa: E501 + x: Union[StrictFloat, StrictInt] + y: Union[StrictFloat, StrictInt] + __properties: ClassVar[List[str]] = ["x", "y"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Coordinate2D: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Coordinate2D from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Coordinate2D: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Coordinate2D from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Coordinate2D.parse_obj(obj) + return cls.model_validate(obj) - _obj = Coordinate2D.parse_obj({ + _obj = cls.model_validate({ "x": obj.get("x"), "y": obj.get("y") }) diff --git a/python/geoengine_openapi_client/models/create_dataset.py b/python/geoengine_openapi_client/models/create_dataset.py index 59755cd0..18c896d8 100644 --- a/python/geoengine_openapi_client/models/create_dataset.py +++ b/python/geoengine_openapi_client/models/create_dataset.py @@ -18,44 +18,60 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.data_path import DataPath from geoengine_openapi_client.models.dataset_definition import DatasetDefinition +from typing import Optional, Set +from typing_extensions import Self class CreateDataset(BaseModel): """ CreateDataset - """ - data_path: DataPath = Field(..., alias="dataPath") - definition: DatasetDefinition = Field(...) 
- __properties = ["dataPath", "definition"] + """ # noqa: E501 + data_path: DataPath = Field(alias="dataPath") + definition: DatasetDefinition + __properties: ClassVar[List[str]] = ["dataPath", "definition"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> CreateDataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of CreateDataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of data_path if self.data_path: _dict['dataPath'] = self.data_path.to_dict() @@ -65,17 +81,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> CreateDataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of CreateDataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return CreateDataset.parse_obj(obj) + return cls.model_validate(obj) - _obj = CreateDataset.parse_obj({ - "data_path": DataPath.from_dict(obj.get("dataPath")) if obj.get("dataPath") is not None else None, - "definition": DatasetDefinition.from_dict(obj.get("definition")) if obj.get("definition") is not None else None + _obj = cls.model_validate({ + "dataPath": DataPath.from_dict(obj["dataPath"]) if obj.get("dataPath") is not None else None, + "definition": DatasetDefinition.from_dict(obj["definition"]) if obj.get("definition") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/create_project.py b/python/geoengine_openapi_client/models/create_project.py index 0a3cf702..f1908bcd 100644 --- a/python/geoengine_openapi_client/models/create_project.py +++ b/python/geoengine_openapi_client/models/create_project.py @@ -18,46 +18,62 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.st_rectangle import STRectangle from geoengine_openapi_client.models.time_step import TimeStep +from typing import Optional, Set +from typing_extensions import Self class CreateProject(BaseModel): """ CreateProject - """ - 
bounds: STRectangle = Field(...) - description: StrictStr = Field(...) - name: StrictStr = Field(...) - time_step: Optional[TimeStep] = Field(None, alias="timeStep") - __properties = ["bounds", "description", "name", "timeStep"] + """ # noqa: E501 + bounds: STRectangle + description: StrictStr + name: StrictStr + time_step: Optional[TimeStep] = Field(default=None, alias="timeStep") + __properties: ClassVar[List[str]] = ["bounds", "description", "name", "timeStep"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> CreateProject: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of CreateProject from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bounds if self.bounds: _dict['bounds'] = self.bounds.to_dict() @@ -65,26 +81,26 @@ def to_dict(self): if self.time_step: _dict['timeStep'] = self.time_step.to_dict() # set to None if time_step (nullable) is None - # and __fields_set__ contains the field - if self.time_step is None and "time_step" in self.__fields_set__: + # and model_fields_set contains the field + if self.time_step is None and "time_step" in self.model_fields_set: _dict['timeStep'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> CreateProject: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of CreateProject from a dict""" if obj is None: return None if not isinstance(obj, dict): - return CreateProject.parse_obj(obj) + return cls.model_validate(obj) - _obj = CreateProject.parse_obj({ - "bounds": STRectangle.from_dict(obj.get("bounds")) if obj.get("bounds") is not None else None, + _obj = cls.model_validate({ + "bounds": STRectangle.from_dict(obj["bounds"]) if obj.get("bounds") is not None else None, "description": obj.get("description"), "name": obj.get("name"), - "time_step": TimeStep.from_dict(obj.get("timeStep")) if obj.get("timeStep") is not None else None + "timeStep": TimeStep.from_dict(obj["timeStep"]) if obj.get("timeStep") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/csv_header.py b/python/geoengine_openapi_client/models/csv_header.py index c4541a8e..149132f0 100644 --- a/python/geoengine_openapi_client/models/csv_header.py +++ b/python/geoengine_openapi_client/models/csv_header.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class CsvHeader(str, Enum): @@ -35,8 +32,8 @@ class CsvHeader(str, Enum): AUTO = 'auto' @classmethod - def from_json(cls, json_str: str) -> CsvHeader: + def from_json(cls, json_str: str) -> Self: """Create an instance of CsvHeader from a JSON string""" - return CsvHeader(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/data_id.py b/python/geoengine_openapi_client/models/data_id.py index 10d22660..e09b8935 100644 --- a/python/geoengine_openapi_client/models/data_id.py +++ b/python/geoengine_openapi_client/models/data_id.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.external_data_id import ExternalDataId from geoengine_openapi_client.models.internal_data_id import InternalDataId -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self DATAID_ONE_OF_SCHEMAS = ["ExternalDataId", "InternalDataId"] @@ -36,16 +34,16 @@ class DataId(BaseModel): oneof_schema_1_validator: Optional[InternalDataId] = None # data type: ExternalDataId oneof_schema_2_validator: Optional[ExternalDataId] 
= None - if TYPE_CHECKING: - actual_instance: Union[ExternalDataId, InternalDataId] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(DATAID_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[ExternalDataId, InternalDataId]] = None + one_of_schemas: Set[str] = { "ExternalDataId", "InternalDataId" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -58,9 +56,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = DataId.construct() + instance = DataId.model_construct() error_messages = [] match = 0 # validate data type: InternalDataId @@ -83,13 +81,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> DataId: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> DataId: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = DataId.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -99,22 +97,22 @@ def from_json(cls, json_str: str) -> DataId: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `ExternalDataId` - if _data_type == "ExternalDataId": + if _data_type == "external": instance.actual_instance = ExternalDataId.from_json(json_str) return instance # check if data type is `InternalDataId` - if _data_type == "InternalDataId": + if _data_type == "internal": instance.actual_instance = InternalDataId.from_json(json_str) return instance # check if data type is `ExternalDataId` - if _data_type == "external": + if _data_type == "ExternalDataId": instance.actual_instance = ExternalDataId.from_json(json_str) return instance # check if data type is `InternalDataId` - if _data_type == "internal": + if _data_type == "InternalDataId": instance.actual_instance = InternalDataId.from_json(json_str) return instance @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], ExternalDataId, InternalDataId]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/data_path.py b/python/geoengine_openapi_client/models/data_path.py index 7c575994..27906fdf 100644 --- 
a/python/geoengine_openapi_client/models/data_path.py +++ b/python/geoengine_openapi_client/models/data_path.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.data_path_one_of import DataPathOneOf from geoengine_openapi_client.models.data_path_one_of1 import DataPathOneOf1 -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self DATAPATH_ONE_OF_SCHEMAS = ["DataPathOneOf", "DataPathOneOf1"] @@ -36,14 +34,14 @@ class DataPath(BaseModel): oneof_schema_1_validator: Optional[DataPathOneOf] = None # data type: DataPathOneOf1 oneof_schema_2_validator: Optional[DataPathOneOf1] = None - if TYPE_CHECKING: - actual_instance: Union[DataPathOneOf, DataPathOneOf1] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(DATAPATH_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[DataPathOneOf, DataPathOneOf1]] = None + one_of_schemas: Set[str] = { "DataPathOneOf", "DataPathOneOf1" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True def __init__(self, *args, **kwargs) -> None: if args: @@ -55,9 +53,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = DataPath.construct() + instance = DataPath.model_construct() error_messages = [] match = 0 # validate data type: DataPathOneOf @@ -80,13 +78,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> DataPath: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> DataPath: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = DataPath.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -117,19 +115,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], DataPathOneOf, DataPathOneOf1]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -137,6 +133,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/data_path_one_of.py 
b/python/geoengine_openapi_client/models/data_path_one_of.py index 78b42641..18fcf369 100644 --- a/python/geoengine_openapi_client/models/data_path_one_of.py +++ b/python/geoengine_openapi_client/models/data_path_one_of.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class DataPathOneOf(BaseModel): """ DataPathOneOf - """ - volume: StrictStr = Field(...) - __properties = ["volume"] + """ # noqa: E501 + volume: StrictStr + __properties: ClassVar[List[str]] = ["volume"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DataPathOneOf: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DataPathOneOf from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DataPathOneOf: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DataPathOneOf from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DataPathOneOf.parse_obj(obj) + return cls.model_validate(obj) - _obj = DataPathOneOf.parse_obj({ + _obj = cls.model_validate({ "volume": obj.get("volume") }) return _obj diff --git a/python/geoengine_openapi_client/models/data_path_one_of1.py b/python/geoengine_openapi_client/models/data_path_one_of1.py index ec5d149d..f3bbee58 100644 --- a/python/geoengine_openapi_client/models/data_path_one_of1.py +++ b/python/geoengine_openapi_client/models/data_path_one_of1.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class DataPathOneOf1(BaseModel): """ DataPathOneOf1 - """ - upload: StrictStr = Field(...) 
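# A sketch of the `oneOf` wrapper after the migration (the volume name is a
# placeholder): the concrete variant is assigned to `actual_instance`, validated by
# the `field_validator`, and `to_dict`/`to_json` are delegated to it.
from geoengine_openapi_client.models.data_path import DataPath
from geoengine_openapi_client.models.data_path_one_of import DataPathOneOf

path = DataPath(actual_instance=DataPathOneOf(volume="test_data"))
assert path.to_dict() == {"volume": "test_data"}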
- __properties = ["upload"] + """ # noqa: E501 + upload: StrictStr + __properties: ClassVar[List[str]] = ["upload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DataPathOneOf1: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DataPathOneOf1 from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DataPathOneOf1: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DataPathOneOf1 from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DataPathOneOf1.parse_obj(obj) + return cls.model_validate(obj) - _obj = DataPathOneOf1.parse_obj({ + _obj = cls.model_validate({ "upload": obj.get("upload") }) return _obj diff --git a/python/geoengine_openapi_client/models/data_usage.py b/python/geoengine_openapi_client/models/data_usage.py index af2fa72b..e1ae09eb 100644 --- a/python/geoengine_openapi_client/models/data_usage.py +++ b/python/geoengine_openapi_client/models/data_usage.py @@ -19,61 +19,79 @@ import json from datetime import datetime - -from pydantic import BaseModel, Field, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class DataUsage(BaseModel): """ DataUsage - """ - computation_id: StrictStr = Field(..., alias="computationId") - count: conint(strict=True, ge=0) = Field(...) - data: StrictStr = Field(...) - timestamp: datetime = Field(...) 
- user_id: StrictStr = Field(..., alias="userId") - __properties = ["computationId", "count", "data", "timestamp", "userId"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + computation_id: StrictStr = Field(alias="computationId") + count: Annotated[int, Field(strict=True, ge=0)] + data: StrictStr + timestamp: datetime + user_id: StrictStr = Field(alias="userId") + __properties: ClassVar[List[str]] = ["computationId", "count", "data", "timestamp", "userId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DataUsage: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DataUsage from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DataUsage: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DataUsage from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DataUsage.parse_obj(obj) + return cls.model_validate(obj) - _obj = DataUsage.parse_obj({ - "computation_id": obj.get("computationId"), + _obj = cls.model_validate({ + "computationId": obj.get("computationId"), "count": obj.get("count"), "data": obj.get("data"), "timestamp": obj.get("timestamp"), - "user_id": obj.get("userId") + "userId": obj.get("userId") }) return _obj diff --git a/python/geoengine_openapi_client/models/data_usage_summary.py b/python/geoengine_openapi_client/models/data_usage_summary.py index a2abbbd2..fc232191 100644 --- a/python/geoengine_openapi_client/models/data_usage_summary.py +++ b/python/geoengine_openapi_client/models/data_usage_summary.py @@ -19,54 +19,72 @@ import json from datetime import datetime - -from pydantic import BaseModel, Field, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class DataUsageSummary(BaseModel): """ DataUsageSummary - """ - count: conint(strict=True, ge=0) = Field(...) - data: StrictStr = Field(...) - timestamp: datetime = Field(...) 
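# A sketch of `populate_by_name` on the migrated model (ids and the data name are
# placeholders): snake_case field names and camelCase aliases are both accepted on
# input, while `to_dict` always emits the aliases from `__properties`.
from datetime import datetime, timezone
from geoengine_openapi_client.models.data_usage import DataUsage

usage = DataUsage(
    computation_id="00000000-0000-0000-0000-000000000000",
    count=1,
    data="ne_10m_ports",
    timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc),
    user_id="00000000-0000-0000-0000-000000000001",
)
assert set(usage.to_dict()) == {"computationId", "count", "data", "timestamp", "userId"}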
- __properties = ["count", "data", "timestamp"] + """ # noqa: E501 + count: Annotated[int, Field(strict=True, ge=0)] + data: StrictStr + timestamp: datetime + __properties: ClassVar[List[str]] = ["count", "data", "timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DataUsageSummary: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DataUsageSummary from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DataUsageSummary: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DataUsageSummary from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DataUsageSummary.parse_obj(obj) + return cls.model_validate(obj) - _obj = DataUsageSummary.parse_obj({ + _obj = cls.model_validate({ "count": obj.get("count"), "data": obj.get("data"), "timestamp": obj.get("timestamp") diff --git a/python/geoengine_openapi_client/models/dataset.py b/python/geoengine_openapi_client/models/dataset.py index f93cf589..5637b012 100644 --- a/python/geoengine_openapi_client/models/dataset.py +++ b/python/geoengine_openapi_client/models/dataset.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.provenance import Provenance from geoengine_openapi_client.models.symbology import Symbology from geoengine_openapi_client.models.typed_result_descriptor import TypedResultDescriptor +from typing import Optional, Set +from typing_extensions import Self class Dataset(BaseModel): """ Dataset - """ - description: StrictStr = Field(...) - display_name: StrictStr = Field(..., alias="displayName") - id: StrictStr = Field(...) - name: StrictStr = Field(...) 
- provenance: Optional[conlist(Provenance)] = None - result_descriptor: TypedResultDescriptor = Field(..., alias="resultDescriptor") - source_operator: StrictStr = Field(..., alias="sourceOperator") + """ # noqa: E501 + description: StrictStr + display_name: StrictStr = Field(alias="displayName") + id: StrictStr + name: StrictStr + provenance: Optional[List[Provenance]] = None + result_descriptor: TypedResultDescriptor = Field(alias="resultDescriptor") + source_operator: StrictStr = Field(alias="sourceOperator") symbology: Optional[Symbology] = None - tags: Optional[conlist(StrictStr)] = None - __properties = ["description", "displayName", "id", "name", "provenance", "resultDescriptor", "sourceOperator", "symbology", "tags"] + tags: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["description", "displayName", "id", "name", "provenance", "resultDescriptor", "sourceOperator", "symbology", "tags"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Dataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Dataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in provenance (list) _items = [] if self.provenance: - for _item in self.provenance: - if _item: - _items.append(_item.to_dict()) + for _item_provenance in self.provenance: + if _item_provenance: + _items.append(_item_provenance.to_dict()) _dict['provenance'] = _items # override the default output from pydantic by calling `to_dict()` of result_descriptor if self.result_descriptor: @@ -78,40 +94,40 @@ def to_dict(self): if self.symbology: _dict['symbology'] = self.symbology.to_dict() # set to None if provenance (nullable) is None - # and __fields_set__ contains the field - if self.provenance is None and "provenance" in self.__fields_set__: + # and model_fields_set contains the field + if self.provenance is None and "provenance" in self.model_fields_set: _dict['provenance'] = None # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None # set to None if tags (nullable) is None - # and __fields_set__ contains the field - if self.tags is None and "tags" in self.__fields_set__: + # and model_fields_set contains the field + if self.tags is None and "tags" in self.model_fields_set: _dict['tags'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> Dataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Dataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Dataset.parse_obj(obj) + return cls.model_validate(obj) - _obj = Dataset.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "display_name": obj.get("displayName"), + "displayName": obj.get("displayName"), "id": obj.get("id"), "name": obj.get("name"), - "provenance": [Provenance.from_dict(_item) for _item in obj.get("provenance")] if obj.get("provenance") is not None else None, - "result_descriptor": TypedResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, - "source_operator": obj.get("sourceOperator"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, + "provenance": [Provenance.from_dict(_item) for _item in obj["provenance"]] if obj.get("provenance") is not None else None, + "resultDescriptor": TypedResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, + "sourceOperator": obj.get("sourceOperator"), + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, "tags": obj.get("tags") }) return _obj diff --git a/python/geoengine_openapi_client/models/dataset_definition.py b/python/geoengine_openapi_client/models/dataset_definition.py index a831d24f..3fbe5829 100644 --- a/python/geoengine_openapi_client/models/dataset_definition.py +++ b/python/geoengine_openapi_client/models/dataset_definition.py @@ -18,44 +18,60 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.add_dataset import AddDataset from 
geoengine_openapi_client.models.meta_data_definition import MetaDataDefinition +from typing import Optional, Set +from typing_extensions import Self class DatasetDefinition(BaseModel): """ DatasetDefinition - """ - meta_data: MetaDataDefinition = Field(..., alias="metaData") - properties: AddDataset = Field(...) - __properties = ["metaData", "properties"] + """ # noqa: E501 + meta_data: MetaDataDefinition = Field(alias="metaData") + properties: AddDataset + __properties: ClassVar[List[str]] = ["metaData", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DatasetDefinition: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DatasetDefinition from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of meta_data if self.meta_data: _dict['metaData'] = self.meta_data.to_dict() @@ -65,17 +81,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> DatasetDefinition: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DatasetDefinition from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DatasetDefinition.parse_obj(obj) + return cls.model_validate(obj) - _obj = DatasetDefinition.parse_obj({ - "meta_data": MetaDataDefinition.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None, - "properties": AddDataset.from_dict(obj.get("properties")) if obj.get("properties") is not None else None + _obj = cls.model_validate({ + "metaData": MetaDataDefinition.from_dict(obj["metaData"]) if obj.get("metaData") is not None else None, + "properties": AddDataset.from_dict(obj["properties"]) if obj.get("properties") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/dataset_listing.py b/python/geoengine_openapi_client/models/dataset_listing.py index 7a81474e..1b0142d9 100644 --- a/python/geoengine_openapi_client/models/dataset_listing.py +++ b/python/geoengine_openapi_client/models/dataset_listing.py @@ -18,50 +18,66 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.symbology import Symbology from geoengine_openapi_client.models.typed_result_descriptor import TypedResultDescriptor +from typing import Optional, Set +from typing_extensions import Self class DatasetListing(BaseModel): """ DatasetListing - """ - description: StrictStr = Field(...) - display_name: StrictStr = Field(..., alias="displayName") - id: StrictStr = Field(...) - name: StrictStr = Field(...) - result_descriptor: TypedResultDescriptor = Field(..., alias="resultDescriptor") - source_operator: StrictStr = Field(..., alias="sourceOperator") + """ # noqa: E501 + description: StrictStr + display_name: StrictStr = Field(alias="displayName") + id: StrictStr + name: StrictStr + result_descriptor: TypedResultDescriptor = Field(alias="resultDescriptor") + source_operator: StrictStr = Field(alias="sourceOperator") symbology: Optional[Symbology] = None - tags: conlist(StrictStr) = Field(...) 
- __properties = ["description", "displayName", "id", "name", "resultDescriptor", "sourceOperator", "symbology", "tags"] + tags: List[StrictStr] + __properties: ClassVar[List[str]] = ["description", "displayName", "id", "name", "resultDescriptor", "sourceOperator", "symbology", "tags"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DatasetListing: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DatasetListing from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of result_descriptor if self.result_descriptor: _dict['resultDescriptor'] = self.result_descriptor.to_dict() @@ -69,29 +85,29 @@ def to_dict(self): if self.symbology: _dict['symbology'] = self.symbology.to_dict() # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> DatasetListing: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DatasetListing from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DatasetListing.parse_obj(obj) + return cls.model_validate(obj) - _obj = DatasetListing.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "display_name": obj.get("displayName"), + "displayName": obj.get("displayName"), "id": obj.get("id"), "name": obj.get("name"), - "result_descriptor": TypedResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, - "source_operator": obj.get("sourceOperator"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, + "resultDescriptor": TypedResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, + "sourceOperator": obj.get("sourceOperator"), + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, "tags": 
obj.get("tags") }) return _obj diff --git a/python/geoengine_openapi_client/models/dataset_resource.py b/python/geoengine_openapi_client/models/dataset_resource.py index 77140160..50ec4781 100644 --- a/python/geoengine_openapi_client/models/dataset_resource.py +++ b/python/geoengine_openapi_client/models/dataset_resource.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class DatasetResource(BaseModel): """ DatasetResource - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) - __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('dataset'): + if value not in set(['dataset']): raise ValueError("must be one of enum values ('dataset')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DatasetResource: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DatasetResource from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DatasetResource: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DatasetResource from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DatasetResource.parse_obj(obj) + return cls.model_validate(obj) - _obj = DatasetResource.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/derived_color.py b/python/geoengine_openapi_client/models/derived_color.py index cc0b7061..f45cfad3 100644 --- a/python/geoengine_openapi_client/models/derived_color.py +++ b/python/geoengine_openapi_client/models/derived_color.py @@ -18,68 +18,84 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.colorizer import Colorizer +from typing import Optional, Set +from typing_extensions import Self class DerivedColor(BaseModel): """ DerivedColor - """ - attribute: StrictStr = Field(...) - colorizer: Colorizer = Field(...) - type: StrictStr = Field(...) - __properties = ["attribute", "colorizer", "type"] + """ # noqa: E501 + attribute: StrictStr + colorizer: Colorizer + type: StrictStr + __properties: ClassVar[List[str]] = ["attribute", "colorizer", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('derived'): + if value not in set(['derived']): raise ValueError("must be one of enum values ('derived')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DerivedColor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DerivedColor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of colorizer if self.colorizer: _dict['colorizer'] = self.colorizer.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> DerivedColor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DerivedColor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DerivedColor.parse_obj(obj) + return cls.model_validate(obj) - _obj = DerivedColor.parse_obj({ + _obj = cls.model_validate({ "attribute": obj.get("attribute"), - "colorizer": Colorizer.from_dict(obj.get("colorizer")) if obj.get("colorizer") is not None else None, + "colorizer": Colorizer.from_dict(obj["colorizer"]) if obj.get("colorizer") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/derived_number.py b/python/geoengine_openapi_client/models/derived_number.py index 059d8f45..dd9537f0 100644 --- a/python/geoengine_openapi_client/models/derived_number.py +++ b/python/geoengine_openapi_client/models/derived_number.py @@ -18,65 +18,81 @@ import re # noqa: F401 import json - -from typing import Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Union +from typing import Optional, Set +from typing_extensions import Self class DerivedNumber(BaseModel): """ DerivedNumber - """ - attribute: StrictStr = Field(...) - default_value: Union[StrictFloat, StrictInt] = Field(..., alias="defaultValue") - factor: Union[StrictFloat, StrictInt] = Field(...) - type: StrictStr = Field(...) 
- __properties = ["attribute", "defaultValue", "factor", "type"] - - @validator('type') + """ # noqa: E501 + attribute: StrictStr + default_value: Union[StrictFloat, StrictInt] = Field(alias="defaultValue") + factor: Union[StrictFloat, StrictInt] + type: StrictStr + __properties: ClassVar[List[str]] = ["attribute", "defaultValue", "factor", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('derived'): + if value not in set(['derived']): raise ValueError("must be one of enum values ('derived')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> DerivedNumber: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of DerivedNumber from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> DerivedNumber: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of DerivedNumber from a dict""" if obj is None: return None if not isinstance(obj, dict): - return DerivedNumber.parse_obj(obj) + return cls.model_validate(obj) - _obj = DerivedNumber.parse_obj({ + _obj = cls.model_validate({ "attribute": obj.get("attribute"), - "default_value": obj.get("defaultValue"), + "defaultValue": obj.get("defaultValue"), "factor": obj.get("factor"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/describe_coverage_request.py b/python/geoengine_openapi_client/models/describe_coverage_request.py index 91a0efb6..20c93cce 100644 --- a/python/geoengine_openapi_client/models/describe_coverage_request.py +++ b/python/geoengine_openapi_client/models/describe_coverage_request.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class DescribeCoverageRequest(str, Enum): @@ -33,8 +30,8 @@ class DescribeCoverageRequest(str, Enum): DESCRIBECOVERAGE = 'DescribeCoverage' @classmethod - def from_json(cls, json_str: str) -> DescribeCoverageRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of DescribeCoverageRequest from a JSON string""" - return DescribeCoverageRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/error_response.py b/python/geoengine_openapi_client/models/error_response.py index cfe5386e..0fce584d 100644 --- a/python/geoengine_openapi_client/models/error_response.py +++ b/python/geoengine_openapi_client/models/error_response.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ErrorResponse(BaseModel): """ ErrorResponse - """ - error: StrictStr = Field(...) - message: StrictStr = Field(...) 
- __properties = ["error", "message"] + """ # noqa: E501 + error: StrictStr + message: StrictStr + __properties: ClassVar[List[str]] = ["error", "message"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ErrorResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ErrorResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ErrorResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ErrorResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ErrorResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = ErrorResponse.parse_obj({ + _obj = cls.model_validate({ "error": obj.get("error"), "message": obj.get("message") }) diff --git a/python/geoengine_openapi_client/models/external_data_id.py b/python/geoengine_openapi_client/models/external_data_id.py index e8d6cc8e..e6614056 100644 --- a/python/geoengine_openapi_client/models/external_data_id.py +++ b/python/geoengine_openapi_client/models/external_data_id.py @@ -18,64 +18,80 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ExternalDataId(BaseModel): """ ExternalDataId - """ - layer_id: StrictStr = Field(..., alias="layerId") - provider_id: StrictStr = Field(..., alias="providerId") - type: StrictStr = Field(...) 
- __properties = ["layerId", "providerId", "type"] + """ # noqa: E501 + layer_id: StrictStr = Field(alias="layerId") + provider_id: StrictStr = Field(alias="providerId") + type: StrictStr + __properties: ClassVar[List[str]] = ["layerId", "providerId", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('external'): + if value not in set(['external']): raise ValueError("must be one of enum values ('external')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ExternalDataId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ExternalDataId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ExternalDataId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ExternalDataId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ExternalDataId.parse_obj(obj) + return cls.model_validate(obj) - _obj = ExternalDataId.parse_obj({ - "layer_id": obj.get("layerId"), - "provider_id": obj.get("providerId"), + _obj = cls.model_validate({ + "layerId": obj.get("layerId"), + "providerId": obj.get("providerId"), "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/feature_data_type.py b/python/geoengine_openapi_client/models/feature_data_type.py index 3e52f265..26c9e6b3 100644 --- a/python/geoengine_openapi_client/models/feature_data_type.py +++ b/python/geoengine_openapi_client/models/feature_data_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class FeatureDataType(str, Enum): @@ -38,8 +35,8 @@ class FeatureDataType(str, Enum): DATETIME = 'dateTime' @classmethod - def from_json(cls, json_str: str) -> FeatureDataType: + def from_json(cls, json_str: str) -> Self: """Create an instance of FeatureDataType from a JSON string""" - return FeatureDataType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/file_not_found_handling.py b/python/geoengine_openapi_client/models/file_not_found_handling.py index e1da61c4..ef14ea1a 100644 --- a/python/geoengine_openapi_client/models/file_not_found_handling.py +++ b/python/geoengine_openapi_client/models/file_not_found_handling.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class FileNotFoundHandling(str, Enum): @@ -34,8 +31,8 @@ class FileNotFoundHandling(str, Enum): ERROR = 'Error' @classmethod - def from_json(cls, json_str: str) -> FileNotFoundHandling: + def from_json(cls, json_str: str) -> Self: """Create an instance of FileNotFoundHandling from a JSON string""" - return FileNotFoundHandling(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/format_specifics.py b/python/geoengine_openapi_client/models/format_specifics.py index 4fee84d7..782adccb 100644 --- a/python/geoengine_openapi_client/models/format_specifics.py +++ b/python/geoengine_openapi_client/models/format_specifics.py @@ -14,16 +14,14 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.format_specifics_one_of import FormatSpecificsOneOf -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self FORMATSPECIFICS_ONE_OF_SCHEMAS = ["FormatSpecificsOneOf"] @@ -33,14 +31,14 @@ 
class FormatSpecifics(BaseModel): """ # data type: FormatSpecificsOneOf oneof_schema_1_validator: Optional[FormatSpecificsOneOf] = None - if TYPE_CHECKING: - actual_instance: Union[FormatSpecificsOneOf] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(FORMATSPECIFICS_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[FormatSpecificsOneOf]] = None + one_of_schemas: Set[str] = { "FormatSpecificsOneOf" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True def __init__(self, *args, **kwargs) -> None: if args: @@ -52,9 +50,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = FormatSpecifics.construct() + instance = FormatSpecifics.model_construct() error_messages = [] match = 0 # validate data type: FormatSpecificsOneOf @@ -72,13 +70,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> FormatSpecifics: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> FormatSpecifics: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = FormatSpecifics.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -103,19 +101,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], FormatSpecificsOneOf]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -123,6 +119,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/format_specifics_one_of.py b/python/geoengine_openapi_client/models/format_specifics_one_of.py index d5964ece..356387fc 100644 --- a/python/geoengine_openapi_client/models/format_specifics_one_of.py +++ b/python/geoengine_openapi_client/models/format_specifics_one_of.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.format_specifics_one_of_csv import FormatSpecificsOneOfCsv +from typing import Optional, Set +from typing_extensions import Self class FormatSpecificsOneOf(BaseModel): """ FormatSpecificsOneOf - """ - csv: FormatSpecificsOneOfCsv = Field(...) 
- __properties = ["csv"] + """ # noqa: E501 + csv: FormatSpecificsOneOfCsv + __properties: ClassVar[List[str]] = ["csv"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> FormatSpecificsOneOf: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of FormatSpecificsOneOf from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of csv if self.csv: _dict['csv'] = self.csv.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> FormatSpecificsOneOf: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of FormatSpecificsOneOf from a dict""" if obj is None: return None if not isinstance(obj, dict): - return FormatSpecificsOneOf.parse_obj(obj) + return cls.model_validate(obj) - _obj = FormatSpecificsOneOf.parse_obj({ - "csv": FormatSpecificsOneOfCsv.from_dict(obj.get("csv")) if obj.get("csv") is not None else None + _obj = cls.model_validate({ + "csv": FormatSpecificsOneOfCsv.from_dict(obj["csv"]) if obj.get("csv") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/format_specifics_one_of_csv.py b/python/geoengine_openapi_client/models/format_specifics_one_of_csv.py index d450b400..14330498 100644 --- a/python/geoengine_openapi_client/models/format_specifics_one_of_csv.py +++ b/python/geoengine_openapi_client/models/format_specifics_one_of_csv.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.csv_header import CsvHeader +from typing import Optional, Set +from typing_extensions import Self class FormatSpecificsOneOfCsv(BaseModel): """ FormatSpecificsOneOfCsv - """ - header: CsvHeader = Field(...) 
- __properties = ["header"] + """ # noqa: E501 + header: CsvHeader + __properties: ClassVar[List[str]] = ["header"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> FormatSpecificsOneOfCsv: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of FormatSpecificsOneOfCsv from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> FormatSpecificsOneOfCsv: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of FormatSpecificsOneOfCsv from a dict""" if obj is None: return None if not isinstance(obj, dict): - return FormatSpecificsOneOfCsv.parse_obj(obj) + return cls.model_validate(obj) - _obj = FormatSpecificsOneOfCsv.parse_obj({ + _obj = cls.model_validate({ "header": obj.get("header") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_dataset_geo_transform.py b/python/geoengine_openapi_client/models/gdal_dataset_geo_transform.py index 65587959..e54e8bc9 100644 --- a/python/geoengine_openapi_client/models/gdal_dataset_geo_transform.py +++ b/python/geoengine_openapi_client/models/gdal_dataset_geo_transform.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - -from typing import Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Union from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class GdalDatasetGeoTransform(BaseModel): """ GdalDatasetGeoTransform - """ - origin_coordinate: Coordinate2D = Field(..., alias="originCoordinate") - x_pixel_size: Union[StrictFloat, StrictInt] = Field(..., alias="xPixelSize") - y_pixel_size: Union[StrictFloat, StrictInt] = Field(..., alias="yPixelSize") - __properties = ["originCoordinate", "xPixelSize", "yPixelSize"] + """ # noqa: E501 + origin_coordinate: Coordinate2D = Field(alias="originCoordinate") + x_pixel_size: Union[StrictFloat, StrictInt] = Field(alias="xPixelSize") + y_pixel_size: Union[StrictFloat, StrictInt] = Field(alias="yPixelSize") + __properties: ClassVar[List[str]] = 
["originCoordinate", "xPixelSize", "yPixelSize"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalDatasetGeoTransform: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalDatasetGeoTransform from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of origin_coordinate if self.origin_coordinate: _dict['originCoordinate'] = self.origin_coordinate.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalDatasetGeoTransform: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalDatasetGeoTransform from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalDatasetGeoTransform.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalDatasetGeoTransform.parse_obj({ - "origin_coordinate": Coordinate2D.from_dict(obj.get("originCoordinate")) if obj.get("originCoordinate") is not None else None, - "x_pixel_size": obj.get("xPixelSize"), - "y_pixel_size": obj.get("yPixelSize") + _obj = cls.model_validate({ + "originCoordinate": Coordinate2D.from_dict(obj["originCoordinate"]) if obj.get("originCoordinate") is not None else None, + "xPixelSize": obj.get("xPixelSize"), + "yPixelSize": obj.get("yPixelSize") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_dataset_parameters.py b/python/geoengine_openapi_client/models/gdal_dataset_parameters.py index 793f5f0b..6d0686d7 100644 --- a/python/geoengine_openapi_client/models/gdal_dataset_parameters.py +++ b/python/geoengine_openapi_client/models/gdal_dataset_parameters.py @@ -18,106 +18,123 @@ import re # noqa: F401 import json - -from typing import List, Optional, Union -from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr, conint, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated from geoengine_openapi_client.models.file_not_found_handling import FileNotFoundHandling from geoengine_openapi_client.models.gdal_dataset_geo_transform import GdalDatasetGeoTransform 
from geoengine_openapi_client.models.gdal_metadata_mapping import GdalMetadataMapping +from typing import Optional, Set +from typing_extensions import Self class GdalDatasetParameters(BaseModel): """ - Parameters for loading data using Gdal # noqa: E501 - """ - allow_alphaband_as_mask: Optional[StrictBool] = Field(None, alias="allowAlphabandAsMask") - file_not_found_handling: FileNotFoundHandling = Field(..., alias="fileNotFoundHandling") - file_path: StrictStr = Field(..., alias="filePath") - gdal_config_options: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = Field(None, alias="gdalConfigOptions") - gdal_open_options: Optional[conlist(StrictStr)] = Field(None, alias="gdalOpenOptions") - geo_transform: GdalDatasetGeoTransform = Field(..., alias="geoTransform") - height: conint(strict=True, ge=0) = Field(...) - no_data_value: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="noDataValue") - properties_mapping: Optional[conlist(GdalMetadataMapping)] = Field(None, alias="propertiesMapping") - rasterband_channel: conint(strict=True, ge=0) = Field(..., alias="rasterbandChannel") - width: conint(strict=True, ge=0) = Field(...) - __properties = ["allowAlphabandAsMask", "fileNotFoundHandling", "filePath", "gdalConfigOptions", "gdalOpenOptions", "geoTransform", "height", "noDataValue", "propertiesMapping", "rasterbandChannel", "width"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + Parameters for loading data using Gdal + """ # noqa: E501 + allow_alphaband_as_mask: Optional[StrictBool] = Field(default=None, alias="allowAlphabandAsMask") + file_not_found_handling: FileNotFoundHandling = Field(alias="fileNotFoundHandling") + file_path: StrictStr = Field(alias="filePath") + gdal_config_options: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = Field(default=None, alias="gdalConfigOptions") + gdal_open_options: Optional[List[StrictStr]] = Field(default=None, alias="gdalOpenOptions") + geo_transform: GdalDatasetGeoTransform = Field(alias="geoTransform") + height: Annotated[int, Field(strict=True, ge=0)] + no_data_value: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, alias="noDataValue") + properties_mapping: Optional[List[GdalMetadataMapping]] = Field(default=None, alias="propertiesMapping") + rasterband_channel: Annotated[int, Field(strict=True, ge=0)] = Field(alias="rasterbandChannel") + width: Annotated[int, Field(strict=True, ge=0)] + __properties: ClassVar[List[str]] = ["allowAlphabandAsMask", "fileNotFoundHandling", "filePath", "gdalConfigOptions", "gdalOpenOptions", "geoTransform", "height", "noDataValue", "propertiesMapping", "rasterbandChannel", "width"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalDatasetParameters: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalDatasetParameters from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the 
dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of geo_transform if self.geo_transform: _dict['geoTransform'] = self.geo_transform.to_dict() # override the default output from pydantic by calling `to_dict()` of each item in properties_mapping (list) _items = [] if self.properties_mapping: - for _item in self.properties_mapping: - if _item: - _items.append(_item.to_dict()) + for _item_properties_mapping in self.properties_mapping: + if _item_properties_mapping: + _items.append(_item_properties_mapping.to_dict()) _dict['propertiesMapping'] = _items # set to None if gdal_config_options (nullable) is None - # and __fields_set__ contains the field - if self.gdal_config_options is None and "gdal_config_options" in self.__fields_set__: + # and model_fields_set contains the field + if self.gdal_config_options is None and "gdal_config_options" in self.model_fields_set: _dict['gdalConfigOptions'] = None # set to None if gdal_open_options (nullable) is None - # and __fields_set__ contains the field - if self.gdal_open_options is None and "gdal_open_options" in self.__fields_set__: + # and model_fields_set contains the field + if self.gdal_open_options is None and "gdal_open_options" in self.model_fields_set: _dict['gdalOpenOptions'] = None # set to None if no_data_value (nullable) is None - # and __fields_set__ contains the field - if self.no_data_value is None and "no_data_value" in self.__fields_set__: + # and model_fields_set contains the field + if self.no_data_value is None and "no_data_value" in self.model_fields_set: _dict['noDataValue'] = None # set to None if properties_mapping (nullable) is None - # and __fields_set__ contains the field - if self.properties_mapping is None and "properties_mapping" in self.__fields_set__: + # and model_fields_set contains the field + if self.properties_mapping is None and "properties_mapping" in self.model_fields_set: _dict['propertiesMapping'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalDatasetParameters: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalDatasetParameters from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalDatasetParameters.parse_obj(obj) - - _obj = GdalDatasetParameters.parse_obj({ - "allow_alphaband_as_mask": obj.get("allowAlphabandAsMask"), - "file_not_found_handling": obj.get("fileNotFoundHandling"), - "file_path": obj.get("filePath"), - "gdal_config_options": obj.get("gdalConfigOptions"), - "gdal_open_options": obj.get("gdalOpenOptions"), - "geo_transform": GdalDatasetGeoTransform.from_dict(obj.get("geoTransform")) if obj.get("geoTransform") is not None else None, + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "allowAlphabandAsMask": obj.get("allowAlphabandAsMask"), + "fileNotFoundHandling": obj.get("fileNotFoundHandling"), + "filePath": obj.get("filePath"), + 
"gdalConfigOptions": obj.get("gdalConfigOptions"), + "gdalOpenOptions": obj.get("gdalOpenOptions"), + "geoTransform": GdalDatasetGeoTransform.from_dict(obj["geoTransform"]) if obj.get("geoTransform") is not None else None, "height": obj.get("height"), - "no_data_value": obj.get("noDataValue"), - "properties_mapping": [GdalMetadataMapping.from_dict(_item) for _item in obj.get("propertiesMapping")] if obj.get("propertiesMapping") is not None else None, - "rasterband_channel": obj.get("rasterbandChannel"), + "noDataValue": obj.get("noDataValue"), + "propertiesMapping": [GdalMetadataMapping.from_dict(_item) for _item in obj["propertiesMapping"]] if obj.get("propertiesMapping") is not None else None, + "rasterbandChannel": obj.get("rasterbandChannel"), "width": obj.get("width") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_loading_info_temporal_slice.py b/python/geoengine_openapi_client/models/gdal_loading_info_temporal_slice.py index df33199e..a335ac5e 100644 --- a/python/geoengine_openapi_client/models/gdal_loading_info_temporal_slice.py +++ b/python/geoengine_openapi_client/models/gdal_loading_info_temporal_slice.py @@ -18,45 +18,62 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, conint +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class GdalLoadingInfoTemporalSlice(BaseModel): """ - one temporal slice of the dataset that requires reading from exactly one Gdal dataset # noqa: E501 - """ - cache_ttl: Optional[conint(strict=True, ge=0)] = Field(None, alias="cacheTtl") + one temporal slice of the dataset that requires reading from exactly one Gdal dataset + """ # noqa: E501 + cache_ttl: Optional[Annotated[int, Field(strict=True, ge=0)]] = Field(default=None, alias="cacheTtl") params: Optional[GdalDatasetParameters] = None - time: TimeInterval = Field(...) - __properties = ["cacheTtl", "params", "time"] + time: TimeInterval + __properties: ClassVar[List[str]] = ["cacheTtl", "params", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalLoadingInfoTemporalSlice: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalLoadingInfoTemporalSlice from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of params if self.params: _dict['params'] = self.params.to_dict() @@ -64,25 +81,25 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if params (nullable) is None - # and __fields_set__ contains the field - if self.params is None and "params" in self.__fields_set__: + # and model_fields_set contains the field + if self.params is None and "params" in self.model_fields_set: _dict['params'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalLoadingInfoTemporalSlice: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalLoadingInfoTemporalSlice from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalLoadingInfoTemporalSlice.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalLoadingInfoTemporalSlice.parse_obj({ - "cache_ttl": obj.get("cacheTtl"), - "params": GdalDatasetParameters.from_dict(obj.get("params")) if obj.get("params") is not None else None, - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None + _obj = cls.model_validate({ + "cacheTtl": obj.get("cacheTtl"), + "params": GdalDatasetParameters.from_dict(obj["params"]) if obj.get("params") is not None else None, + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_meta_data_list.py b/python/geoengine_openapi_client/models/gdal_meta_data_list.py index 7ffad540..c25dacd7 100644 --- a/python/geoengine_openapi_client/models/gdal_meta_data_list.py +++ b/python/geoengine_openapi_client/models/gdal_meta_data_list.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.gdal_loading_info_temporal_slice import GdalLoadingInfoTemporalSlice from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor +from typing import Optional, Set +from typing_extensions import Self class GdalMetaDataList(BaseModel): """ GdalMetaDataList - """ - params: conlist(GdalLoadingInfoTemporalSlice) = Field(...) - result_descriptor: RasterResultDescriptor = Field(..., alias="resultDescriptor") - type: StrictStr = Field(...) 
- __properties = ["params", "resultDescriptor", "type"] + """ # noqa: E501 + params: List[GdalLoadingInfoTemporalSlice] + result_descriptor: RasterResultDescriptor = Field(alias="resultDescriptor") + type: StrictStr + __properties: ClassVar[List[str]] = ["params", "resultDescriptor", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('GdalMetaDataList'): + if value not in set(['GdalMetaDataList']): raise ValueError("must be one of enum values ('GdalMetaDataList')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalMetaDataList: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalMetaDataList from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in params (list) _items = [] if self.params: - for _item in self.params: - if _item: - _items.append(_item.to_dict()) + for _item_params in self.params: + if _item_params: + _items.append(_item_params.to_dict()) _dict['params'] = _items # override the default output from pydantic by calling `to_dict()` of result_descriptor if self.result_descriptor: @@ -77,17 +93,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalMetaDataList: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalMetaDataList from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalMetaDataList.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalMetaDataList.parse_obj({ - "params": [GdalLoadingInfoTemporalSlice.from_dict(_item) for _item in obj.get("params")] if obj.get("params") is not None else None, - "result_descriptor": RasterResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, + _obj = cls.model_validate({ + "params": [GdalLoadingInfoTemporalSlice.from_dict(_item) for _item in obj["params"]] if obj.get("params") is not None else None, + "resultDescriptor": RasterResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_meta_data_regular.py b/python/geoengine_openapi_client/models/gdal_meta_data_regular.py index 428c14bb..0e10b300 100644 --- a/python/geoengine_openapi_client/models/gdal_meta_data_regular.py +++ b/python/geoengine_openapi_client/models/gdal_meta_data_regular.py @@ -18,59 +18,76 @@ import re # noqa: F401 import json - -from typing import Dict, Optional -from pydantic import BaseModel, Field, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters from geoengine_openapi_client.models.gdal_source_time_placeholder import GdalSourceTimePlaceholder from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor from geoengine_openapi_client.models.time_interval import TimeInterval from geoengine_openapi_client.models.time_step import TimeStep +from typing import Optional, Set +from typing_extensions import Self class GdalMetaDataRegular(BaseModel): """ GdalMetaDataRegular - """ - cache_ttl: Optional[conint(strict=True, ge=0)] = Field(None, alias="cacheTtl") - data_time: TimeInterval = Field(..., alias="dataTime") - params: GdalDatasetParameters = Field(...) - result_descriptor: RasterResultDescriptor = Field(..., alias="resultDescriptor") - step: TimeStep = Field(...) - time_placeholders: Dict[str, GdalSourceTimePlaceholder] = Field(..., alias="timePlaceholders") - type: StrictStr = Field(...) 
- __properties = ["cacheTtl", "dataTime", "params", "resultDescriptor", "step", "timePlaceholders", "type"] - - @validator('type') + """ # noqa: E501 + cache_ttl: Optional[Annotated[int, Field(strict=True, ge=0)]] = Field(default=None, alias="cacheTtl") + data_time: TimeInterval = Field(alias="dataTime") + params: GdalDatasetParameters + result_descriptor: RasterResultDescriptor = Field(alias="resultDescriptor") + step: TimeStep + time_placeholders: Dict[str, GdalSourceTimePlaceholder] = Field(alias="timePlaceholders") + type: StrictStr + __properties: ClassVar[List[str]] = ["cacheTtl", "dataTime", "params", "resultDescriptor", "step", "timePlaceholders", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('GdalMetaDataRegular'): + if value not in set(['GdalMetaDataRegular']): raise ValueError("must be one of enum values ('GdalMetaDataRegular')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalMetaDataRegular: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalMetaDataRegular from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of data_time if self.data_time: _dict['dataTime'] = self.data_time.to_dict() @@ -86,30 +103,30 @@ def to_dict(self): # override the default output from pydantic by calling `to_dict()` of each value in time_placeholders (dict) _field_dict = {} if self.time_placeholders: - for _key in self.time_placeholders: - if self.time_placeholders[_key]: - _field_dict[_key] = self.time_placeholders[_key].to_dict() + for _key_time_placeholders in self.time_placeholders: + if self.time_placeholders[_key_time_placeholders]: + _field_dict[_key_time_placeholders] = self.time_placeholders[_key_time_placeholders].to_dict() _dict['timePlaceholders'] = _field_dict return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalMetaDataRegular: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalMetaDataRegular from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalMetaDataRegular.parse_obj(obj) - - _obj = GdalMetaDataRegular.parse_obj({ - "cache_ttl": obj.get("cacheTtl"), - "data_time": TimeInterval.from_dict(obj.get("dataTime")) if obj.get("dataTime") is not None else None, - "params": GdalDatasetParameters.from_dict(obj.get("params")) if obj.get("params") is not None else None, - "result_descriptor": RasterResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, - "step": TimeStep.from_dict(obj.get("step")) if obj.get("step") is not None else None, - "time_placeholders": dict( + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cacheTtl": obj.get("cacheTtl"), + "dataTime": TimeInterval.from_dict(obj["dataTime"]) if obj.get("dataTime") is not None else None, + "params": GdalDatasetParameters.from_dict(obj["params"]) if obj.get("params") is not None else None, + "resultDescriptor": RasterResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, + "step": TimeStep.from_dict(obj["step"]) if obj.get("step") is not None else None, + "timePlaceholders": dict( (_k, GdalSourceTimePlaceholder.from_dict(_v)) - for _k, _v in obj.get("timePlaceholders").items() + for _k, _v in obj["timePlaceholders"].items() ) if obj.get("timePlaceholders") is not None else None, diff --git a/python/geoengine_openapi_client/models/gdal_meta_data_static.py b/python/geoengine_openapi_client/models/gdal_meta_data_static.py index cfe6ebdc..6ab704c0 100644 --- a/python/geoengine_openapi_client/models/gdal_meta_data_static.py +++ b/python/geoengine_openapi_client/models/gdal_meta_data_static.py @@ -18,55 +18,72 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class GdalMetaDataStatic(BaseModel): """ GdalMetaDataStatic - """ - cache_ttl: Optional[conint(strict=True, 
ge=0)] = Field(None, alias="cacheTtl") - params: GdalDatasetParameters = Field(...) - result_descriptor: RasterResultDescriptor = Field(..., alias="resultDescriptor") + """ # noqa: E501 + cache_ttl: Optional[Annotated[int, Field(strict=True, ge=0)]] = Field(default=None, alias="cacheTtl") + params: GdalDatasetParameters + result_descriptor: RasterResultDescriptor = Field(alias="resultDescriptor") time: Optional[TimeInterval] = None - type: StrictStr = Field(...) - __properties = ["cacheTtl", "params", "resultDescriptor", "time", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["cacheTtl", "params", "resultDescriptor", "time", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('GdalStatic'): + if value not in set(['GdalStatic']): raise ValueError("must be one of enum values ('GdalStatic')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalMetaDataStatic: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalMetaDataStatic from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of params if self.params: _dict['params'] = self.params.to_dict() @@ -77,26 +94,26 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalMetaDataStatic: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalMetaDataStatic from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalMetaDataStatic.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalMetaDataStatic.parse_obj({ - "cache_ttl": obj.get("cacheTtl"), - "params": GdalDatasetParameters.from_dict(obj.get("params")) if obj.get("params") is not None else None, - "result_descriptor": RasterResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None, + _obj = cls.model_validate({ + "cacheTtl": obj.get("cacheTtl"), + "params": GdalDatasetParameters.from_dict(obj["params"]) if obj.get("params") is not None else None, + "resultDescriptor": RasterResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_metadata_mapping.py b/python/geoengine_openapi_client/models/gdal_metadata_mapping.py index df3642db..deceaed7 100644 --- a/python/geoengine_openapi_client/models/gdal_metadata_mapping.py +++ b/python/geoengine_openapi_client/models/gdal_metadata_mapping.py @@ -18,45 +18,61 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.raster_properties_entry_type import RasterPropertiesEntryType from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey +from typing import Optional, Set +from typing_extensions import Self class GdalMetadataMapping(BaseModel): """ GdalMetadataMapping - """ - source_key: RasterPropertiesKey = Field(...) - target_key: RasterPropertiesKey = Field(...) - target_type: RasterPropertiesEntryType = Field(...) 
- __properties = ["source_key", "target_key", "target_type"] + """ # noqa: E501 + source_key: RasterPropertiesKey + target_key: RasterPropertiesKey + target_type: RasterPropertiesEntryType + __properties: ClassVar[List[str]] = ["source_key", "target_key", "target_type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalMetadataMapping: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalMetadataMapping from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of source_key if self.source_key: _dict['source_key'] = self.source_key.to_dict() @@ -66,17 +82,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalMetadataMapping: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalMetadataMapping from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalMetadataMapping.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalMetadataMapping.parse_obj({ - "source_key": RasterPropertiesKey.from_dict(obj.get("source_key")) if obj.get("source_key") is not None else None, - "target_key": RasterPropertiesKey.from_dict(obj.get("target_key")) if obj.get("target_key") is not None else None, + _obj = cls.model_validate({ + "source_key": RasterPropertiesKey.from_dict(obj["source_key"]) if obj.get("source_key") is not None else None, + "target_key": RasterPropertiesKey.from_dict(obj["target_key"]) if obj.get("target_key") is not None else None, "target_type": obj.get("target_type") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_metadata_net_cdf_cf.py b/python/geoengine_openapi_client/models/gdal_metadata_net_cdf_cf.py index 38efe1eb..74705d13 100644 --- a/python/geoengine_openapi_client/models/gdal_metadata_net_cdf_cf.py +++ b/python/geoengine_openapi_client/models/gdal_metadata_net_cdf_cf.py @@ -18,58 +18,75 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictInt, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, 
Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor from geoengine_openapi_client.models.time_step import TimeStep +from typing import Optional, Set +from typing_extensions import Self class GdalMetadataNetCdfCf(BaseModel): """ - Meta data for 4D `NetCDF` CF datasets # noqa: E501 - """ - band_offset: conint(strict=True, ge=0) = Field(..., alias="bandOffset", description="A band offset specifies the first band index to use for the first point in time. All other time steps are added to this offset.") - cache_ttl: Optional[conint(strict=True, ge=0)] = Field(None, alias="cacheTtl") - end: StrictInt = Field(...) - params: GdalDatasetParameters = Field(...) - result_descriptor: RasterResultDescriptor = Field(..., alias="resultDescriptor") - start: StrictInt = Field(...) - step: TimeStep = Field(...) - type: StrictStr = Field(...) - __properties = ["bandOffset", "cacheTtl", "end", "params", "resultDescriptor", "start", "step", "type"] - - @validator('type') + Meta data for 4D `NetCDF` CF datasets + """ # noqa: E501 + band_offset: Annotated[int, Field(strict=True, ge=0)] = Field(description="A band offset specifies the first band index to use for the first point in time. All other time steps are added to this offset.", alias="bandOffset") + cache_ttl: Optional[Annotated[int, Field(strict=True, ge=0)]] = Field(default=None, alias="cacheTtl") + end: StrictInt + params: GdalDatasetParameters + result_descriptor: RasterResultDescriptor = Field(alias="resultDescriptor") + start: StrictInt + step: TimeStep + type: StrictStr + __properties: ClassVar[List[str]] = ["bandOffset", "cacheTtl", "end", "params", "resultDescriptor", "start", "step", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('GdalMetadataNetCdfCf'): + if value not in set(['GdalMetadataNetCdfCf']): raise ValueError("must be one of enum values ('GdalMetadataNetCdfCf')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalMetadataNetCdfCf: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalMetadataNetCdfCf from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of params if self.params: _dict['params'] = self.params.to_dict() @@ -82,22 +99,22 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalMetadataNetCdfCf: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalMetadataNetCdfCf from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalMetadataNetCdfCf.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalMetadataNetCdfCf.parse_obj({ - "band_offset": obj.get("bandOffset"), - "cache_ttl": obj.get("cacheTtl"), + _obj = cls.model_validate({ + "bandOffset": obj.get("bandOffset"), + "cacheTtl": obj.get("cacheTtl"), "end": obj.get("end"), - "params": GdalDatasetParameters.from_dict(obj.get("params")) if obj.get("params") is not None else None, - "result_descriptor": RasterResultDescriptor.from_dict(obj.get("resultDescriptor")) if obj.get("resultDescriptor") is not None else None, + "params": GdalDatasetParameters.from_dict(obj["params"]) if obj.get("params") is not None else None, + "resultDescriptor": RasterResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, "start": obj.get("start"), - "step": TimeStep.from_dict(obj.get("step")) if obj.get("step") is not None else None, + "step": TimeStep.from_dict(obj["step"]) if obj.get("step") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/gdal_source_time_placeholder.py b/python/geoengine_openapi_client/models/gdal_source_time_placeholder.py index 47cabb16..34ddd25e 100644 --- a/python/geoengine_openapi_client/models/gdal_source_time_placeholder.py +++ b/python/geoengine_openapi_client/models/gdal_source_time_placeholder.py @@ -18,55 +18,71 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.time_reference import TimeReference +from typing import Optional, Set +from typing_extensions import Self class GdalSourceTimePlaceholder(BaseModel): """ GdalSourceTimePlaceholder - """ - format: StrictStr = Field(...) - reference: TimeReference = Field(...) 
- __properties = ["format", "reference"] + """ # noqa: E501 + format: StrictStr + reference: TimeReference + __properties: ClassVar[List[str]] = ["format", "reference"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GdalSourceTimePlaceholder: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GdalSourceTimePlaceholder from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> GdalSourceTimePlaceholder: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GdalSourceTimePlaceholder from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GdalSourceTimePlaceholder.parse_obj(obj) + return cls.model_validate(obj) - _obj = GdalSourceTimePlaceholder.parse_obj({ + _obj = cls.model_validate({ "format": obj.get("format"), "reference": obj.get("reference") }) diff --git a/python/geoengine_openapi_client/models/geo_json.py b/python/geoengine_openapi_client/models/geo_json.py index ff9f114c..65dd2489 100644 --- a/python/geoengine_openapi_client/models/geo_json.py +++ b/python/geoengine_openapi_client/models/geo_json.py @@ -18,55 +18,71 @@ import re # noqa: F401 import json - -from typing import Any, List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.collection_type import CollectionType +from typing import Optional, Set +from typing_extensions import Self class GeoJson(BaseModel): """ GeoJson - """ - features: conlist(Any) = Field(...) - type: CollectionType = Field(...) 
- __properties = ["features", "type"] + """ # noqa: E501 + features: List[Any] + type: CollectionType + __properties: ClassVar[List[str]] = ["features", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> GeoJson: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of GeoJson from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> GeoJson: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of GeoJson from a dict""" if obj is None: return None if not isinstance(obj, dict): - return GeoJson.parse_obj(obj) + return cls.model_validate(obj) - _obj = GeoJson.parse_obj({ + _obj = cls.model_validate({ "features": obj.get("features"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/get_capabilities_format.py b/python/geoengine_openapi_client/models/get_capabilities_format.py index 9061c98c..58eac3b9 100644 --- a/python/geoengine_openapi_client/models/get_capabilities_format.py +++ b/python/geoengine_openapi_client/models/get_capabilities_format.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetCapabilitiesFormat(str, Enum): @@ -33,8 +30,8 @@ class GetCapabilitiesFormat(str, Enum): TEXT_SLASH_XML = 'text/xml' @classmethod - def from_json(cls, json_str: str) -> GetCapabilitiesFormat: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetCapabilitiesFormat from a JSON string""" - return GetCapabilitiesFormat(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_capabilities_request.py b/python/geoengine_openapi_client/models/get_capabilities_request.py index 9c101416..07b5b608 100644 --- a/python/geoengine_openapi_client/models/get_capabilities_request.py +++ b/python/geoengine_openapi_client/models/get_capabilities_request.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from 
enum import Enum +from typing_extensions import Self class GetCapabilitiesRequest(str, Enum): @@ -33,8 +30,8 @@ class GetCapabilitiesRequest(str, Enum): GETCAPABILITIES = 'GetCapabilities' @classmethod - def from_json(cls, json_str: str) -> GetCapabilitiesRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetCapabilitiesRequest from a JSON string""" - return GetCapabilitiesRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_coverage_format.py b/python/geoengine_openapi_client/models/get_coverage_format.py index cb706fe4..39d1d573 100644 --- a/python/geoengine_openapi_client/models/get_coverage_format.py +++ b/python/geoengine_openapi_client/models/get_coverage_format.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetCoverageFormat(str, Enum): @@ -33,8 +30,8 @@ class GetCoverageFormat(str, Enum): IMAGE_SLASH_TIFF = 'image/tiff' @classmethod - def from_json(cls, json_str: str) -> GetCoverageFormat: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetCoverageFormat from a JSON string""" - return GetCoverageFormat(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_coverage_request.py b/python/geoengine_openapi_client/models/get_coverage_request.py index 7af14cdd..9df3ab15 100644 --- a/python/geoengine_openapi_client/models/get_coverage_request.py +++ b/python/geoengine_openapi_client/models/get_coverage_request.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetCoverageRequest(str, Enum): @@ -33,8 +30,8 @@ class GetCoverageRequest(str, Enum): GETCOVERAGE = 'GetCoverage' @classmethod - def from_json(cls, json_str: str) -> GetCoverageRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetCoverageRequest from a JSON string""" - return GetCoverageRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_feature_request.py b/python/geoengine_openapi_client/models/get_feature_request.py index c0bcbae7..a4cd6056 100644 --- a/python/geoengine_openapi_client/models/get_feature_request.py +++ b/python/geoengine_openapi_client/models/get_feature_request.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetFeatureRequest(str, Enum): @@ -33,8 +30,8 @@ class GetFeatureRequest(str, Enum): GETFEATURE = 'GetFeature' @classmethod - def from_json(cls, json_str: str) -> GetFeatureRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetFeatureRequest from a JSON string""" - return GetFeatureRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_legend_graphic_request.py b/python/geoengine_openapi_client/models/get_legend_graphic_request.py index 4a1a29b9..48aab927 100644 --- a/python/geoengine_openapi_client/models/get_legend_graphic_request.py +++ b/python/geoengine_openapi_client/models/get_legend_graphic_request.py @@ 
-13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetLegendGraphicRequest(str, Enum): @@ -33,8 +30,8 @@ class GetLegendGraphicRequest(str, Enum): GETLEGENDGRAPHIC = 'GetLegendGraphic' @classmethod - def from_json(cls, json_str: str) -> GetLegendGraphicRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetLegendGraphicRequest from a JSON string""" - return GetLegendGraphicRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_map_exception_format.py b/python/geoengine_openapi_client/models/get_map_exception_format.py index 3a27ba95..5ef5613b 100644 --- a/python/geoengine_openapi_client/models/get_map_exception_format.py +++ b/python/geoengine_openapi_client/models/get_map_exception_format.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetMapExceptionFormat(str, Enum): @@ -34,8 +31,8 @@ class GetMapExceptionFormat(str, Enum): JSON = 'JSON' @classmethod - def from_json(cls, json_str: str) -> GetMapExceptionFormat: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetMapExceptionFormat from a JSON string""" - return GetMapExceptionFormat(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_map_format.py b/python/geoengine_openapi_client/models/get_map_format.py index 93434f6f..1da506de 100644 --- a/python/geoengine_openapi_client/models/get_map_format.py +++ b/python/geoengine_openapi_client/models/get_map_format.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetMapFormat(str, Enum): @@ -33,8 +30,8 @@ class GetMapFormat(str, Enum): IMAGE_SLASH_PNG = 'image/png' @classmethod - def from_json(cls, json_str: str) -> GetMapFormat: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetMapFormat from a JSON string""" - return GetMapFormat(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/get_map_request.py b/python/geoengine_openapi_client/models/get_map_request.py index 2411dcf5..9effd255 100644 --- a/python/geoengine_openapi_client/models/get_map_request.py +++ b/python/geoengine_openapi_client/models/get_map_request.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class GetMapRequest(str, Enum): @@ -33,8 +30,8 @@ class GetMapRequest(str, Enum): GETMAP = 'GetMap' @classmethod - def from_json(cls, json_str: str) -> GetMapRequest: + def from_json(cls, json_str: str) -> Self: """Create an instance of GetMapRequest from a JSON string""" - return GetMapRequest(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/inline_object.py b/python/geoengine_openapi_client/models/inline_object.py new file mode 100644 index 00000000..95df4a44 --- /dev/null +++ 
b/python/geoengine_openapi_client/models/inline_object.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class InlineObject(BaseModel): + """ + InlineObject + """ # noqa: E501 + url: StrictStr + __properties: ClassVar[List[str]] = ["url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of InlineObject from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of InlineObject from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/inline_object1.py b/python/geoengine_openapi_client/models/inline_object1.py new file mode 100644 index 00000000..2b4018b4 --- /dev/null +++ b/python/geoengine_openapi_client/models/inline_object1.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class InlineObject1(BaseModel): + """ + InlineObject1 + """ # noqa: E501 + dataset_name: StrictStr = Field(alias="datasetName") + __properties: ClassVar[List[str]] = ["datasetName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of InlineObject1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of InlineObject1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "datasetName": obj.get("datasetName") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/inline_object2.py b/python/geoengine_openapi_client/models/inline_object2.py new file mode 100644 index 00000000..6dec31c2 --- /dev/null +++ b/python/geoengine_openapi_client/models/inline_object2.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class InlineObject2(BaseModel): + """ + InlineObject2 + """ # noqa: E501 + id: StrictStr + __properties: ClassVar[List[str]] = ["id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of InlineObject2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of InlineObject2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/internal_data_id.py b/python/geoengine_openapi_client/models/internal_data_id.py index c25ec397..948a67a6 100644 --- a/python/geoengine_openapi_client/models/internal_data_id.py +++ b/python/geoengine_openapi_client/models/internal_data_id.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class InternalDataId(BaseModel): """ InternalDataId - """ - dataset_id: StrictStr = Field(..., alias="datasetId") - type: StrictStr = Field(...) 
- __properties = ["datasetId", "type"] + """ # noqa: E501 + dataset_id: StrictStr = Field(alias="datasetId") + type: StrictStr + __properties: ClassVar[List[str]] = ["datasetId", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('internal', 'external'): - raise ValueError("must be one of enum values ('internal', 'external')") + if value not in set(['internal']): + raise ValueError("must be one of enum values ('internal')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> InternalDataId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of InternalDataId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> InternalDataId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of InternalDataId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return InternalDataId.parse_obj(obj) + return cls.model_validate(obj) - _obj = InternalDataId.parse_obj({ - "dataset_id": obj.get("datasetId"), + _obj = cls.model_validate({ + "datasetId": obj.get("datasetId"), "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/layer.py b/python/geoengine_openapi_client/models/layer.py index 2a0f0480..6d0e81e5 100644 --- a/python/geoengine_openapi_client/models/layer.py +++ b/python/geoengine_openapi_client/models/layer.py @@ -18,50 +18,67 @@ import re # noqa: F401 import json - -from typing import Dict, List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId from geoengine_openapi_client.models.symbology import Symbology from geoengine_openapi_client.models.workflow import Workflow +from typing import Optional, Set +from typing_extensions import Self class Layer(BaseModel): """ Layer - """ - description: StrictStr = Field(...) - id: ProviderLayerId = Field(...) 
- metadata: Optional[Dict[str, StrictStr]] = Field(None, description="metadata used for loading the data") - name: StrictStr = Field(...) - properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = Field(None, description="properties, for instance, to be rendered in the UI") + """ # noqa: E501 + description: StrictStr + id: ProviderLayerId + metadata: Optional[Dict[str, StrictStr]] = Field(default=None, description="metadata used for loading the data") + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = Field(default=None, description="properties, for instance, to be rendered in the UI") symbology: Optional[Symbology] = None - workflow: Workflow = Field(...) - __properties = ["description", "id", "metadata", "name", "properties", "symbology", "workflow"] + workflow: Workflow + __properties: ClassVar[List[str]] = ["description", "id", "metadata", "name", "properties", "symbology", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Layer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Layer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of id if self.id: _dict['id'] = self.id.to_dict() @@ -72,29 +89,29 @@ def to_dict(self): if self.workflow: _dict['workflow'] = self.workflow.to_dict() # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> Layer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Layer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Layer.parse_obj(obj) + return cls.model_validate(obj) - _obj = Layer.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "id": ProviderLayerId.from_dict(obj.get("id")) if obj.get("id") is not None else None, + "id": ProviderLayerId.from_dict(obj["id"]) if obj.get("id") is not None else None, "metadata": obj.get("metadata"), "name": obj.get("name"), "properties": obj.get("properties"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, - "workflow": Workflow.from_dict(obj.get("workflow")) if obj.get("workflow") is not None else None + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, + "workflow": Workflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/layer_collection.py b/python/geoengine_openapi_client/models/layer_collection.py index 2e81d393..ccd392c2 100644 --- a/python/geoengine_openapi_client/models/layer_collection.py +++ b/python/geoengine_openapi_client/models/layer_collection.py @@ -18,79 +18,96 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.collection_item import CollectionItem from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId +from typing import Optional, Set +from typing_extensions import Self class LayerCollection(BaseModel): """ LayerCollection - """ - description: StrictStr = Field(...) - entry_label: Optional[StrictStr] = Field(None, alias="entryLabel", description="a common label for the collection's entries, if there is any") - id: ProviderLayerCollectionId = Field(...) - items: conlist(CollectionItem) = Field(...) - name: StrictStr = Field(...) - properties: conlist(conlist(StrictStr, max_items=2, min_items=2)) = Field(...) 
- __properties = ["description", "entryLabel", "id", "items", "name", "properties"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + description: StrictStr + entry_label: Optional[StrictStr] = Field(default=None, description="a common label for the collection's entries, if there is any", alias="entryLabel") + id: ProviderLayerCollectionId + items: List[CollectionItem] + name: StrictStr + properties: List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]] + __properties: ClassVar[List[str]] = ["description", "entryLabel", "id", "items", "name", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerCollection: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerCollection from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of id if self.id: _dict['id'] = self.id.to_dict() # override the default output from pydantic by calling `to_dict()` of each item in items (list) _items = [] if self.items: - for _item in self.items: - if _item: - _items.append(_item.to_dict()) + for _item_items in self.items: + if _item_items: + _items.append(_item_items.to_dict()) _dict['items'] = _items # set to None if entry_label (nullable) is None - # and __fields_set__ contains the field - if self.entry_label is None and "entry_label" in self.__fields_set__: + # and model_fields_set contains the field + if self.entry_label is None and "entry_label" in self.model_fields_set: _dict['entryLabel'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerCollection: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerCollection from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerCollection.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerCollection.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "entry_label": obj.get("entryLabel"), - "id": ProviderLayerCollectionId.from_dict(obj.get("id")) if obj.get("id") is not None else None, - "items": [CollectionItem.from_dict(_item) for _item in obj.get("items")] if obj.get("items") is not None else None, + "entryLabel": obj.get("entryLabel"), + "id": ProviderLayerCollectionId.from_dict(obj["id"]) if obj.get("id") is not None else None, + "items": [CollectionItem.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None, "name": obj.get("name"), "properties": obj.get("properties") }) diff --git a/python/geoengine_openapi_client/models/layer_collection_listing.py b/python/geoengine_openapi_client/models/layer_collection_listing.py index 295fa325..533647da 100644 --- a/python/geoengine_openapi_client/models/layer_collection_listing.py +++ b/python/geoengine_openapi_client/models/layer_collection_listing.py @@ -18,70 +18,87 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId +from typing import Optional, Set +from typing_extensions import Self class LayerCollectionListing(BaseModel): """ LayerCollectionListing - """ - description: StrictStr = Field(...) - id: ProviderLayerCollectionId = Field(...) - name: StrictStr = Field(...) - properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = None - type: StrictStr = Field(...) 
- __properties = ["description", "id", "name", "properties", "type"] - - @validator('type') + """ # noqa: E501 + description: StrictStr + id: ProviderLayerCollectionId + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = None + type: StrictStr + __properties: ClassVar[List[str]] = ["description", "id", "name", "properties", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('collection', 'layer'): - raise ValueError("must be one of enum values ('collection', 'layer')") + if value not in set(['collection']): + raise ValueError("must be one of enum values ('collection')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerCollectionListing: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerCollectionListing from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of id if self.id: _dict['id'] = self.id.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerCollectionListing: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerCollectionListing from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerCollectionListing.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerCollectionListing.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "id": ProviderLayerCollectionId.from_dict(obj.get("id")) if obj.get("id") is not None else None, + "id": ProviderLayerCollectionId.from_dict(obj["id"]) if obj.get("id") is not None else None, "name": obj.get("name"), "properties": obj.get("properties"), "type": obj.get("type") diff --git a/python/geoengine_openapi_client/models/layer_collection_resource.py b/python/geoengine_openapi_client/models/layer_collection_resource.py index 7c710f88..96c12874 100644 --- a/python/geoengine_openapi_client/models/layer_collection_resource.py +++ b/python/geoengine_openapi_client/models/layer_collection_resource.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class LayerCollectionResource(BaseModel): """ LayerCollectionResource - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) - __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('layerCollection'): + if value not in set(['layerCollection']): raise ValueError("must be one of enum values ('layerCollection')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerCollectionResource: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerCollectionResource from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerCollectionResource: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerCollectionResource from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerCollectionResource.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerCollectionResource.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/layer_listing.py b/python/geoengine_openapi_client/models/layer_listing.py index 7add001c..50f807d2 100644 --- a/python/geoengine_openapi_client/models/layer_listing.py +++ b/python/geoengine_openapi_client/models/layer_listing.py @@ -18,70 +18,87 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId +from typing import Optional, Set +from typing_extensions import Self class LayerListing(BaseModel): """ LayerListing - """ - description: StrictStr = Field(...) - id: ProviderLayerId = Field(...) - name: StrictStr = Field(...) - properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = Field(None, description="properties, for instance, to be rendered in the UI") - type: StrictStr = Field(...) 
- __properties = ["description", "id", "name", "properties", "type"] - - @validator('type') + """ # noqa: E501 + description: StrictStr + id: ProviderLayerId + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = Field(default=None, description="properties, for instance, to be rendered in the UI") + type: StrictStr + __properties: ClassVar[List[str]] = ["description", "id", "name", "properties", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('layer'): + if value not in set(['layer']): raise ValueError("must be one of enum values ('layer')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerListing: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerListing from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of id if self.id: _dict['id'] = self.id.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerListing: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerListing from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerListing.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerListing.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "id": ProviderLayerId.from_dict(obj.get("id")) if obj.get("id") is not None else None, + "id": ProviderLayerId.from_dict(obj["id"]) if obj.get("id") is not None else None, "name": obj.get("name"), "properties": obj.get("properties"), "type": obj.get("type") diff --git a/python/geoengine_openapi_client/models/layer_resource.py b/python/geoengine_openapi_client/models/layer_resource.py index ea4d528a..1b4cab5f 100644 --- a/python/geoengine_openapi_client/models/layer_resource.py +++ b/python/geoengine_openapi_client/models/layer_resource.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class LayerResource(BaseModel): """ LayerResource - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) - __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('layer', 'layerCollection', 'project', 'dataset', 'mlModel'): - raise ValueError("must be one of enum values ('layer', 'layerCollection', 'project', 'dataset', 'mlModel')") + if value not in set(['layer']): + raise ValueError("must be one of enum values ('layer')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerResource: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerResource from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerResource: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerResource from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerResource.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerResource.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/layer_visibility.py b/python/geoengine_openapi_client/models/layer_visibility.py index 4401c3f8..b7c9cd54 100644 --- a/python/geoengine_openapi_client/models/layer_visibility.py +++ b/python/geoengine_openapi_client/models/layer_visibility.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictBool +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class LayerVisibility(BaseModel): """ LayerVisibility - """ - data: StrictBool = Field(...) - legend: StrictBool = Field(...) - __properties = ["data", "legend"] + """ # noqa: E501 + data: StrictBool + legend: StrictBool + __properties: ClassVar[List[str]] = ["data", "legend"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LayerVisibility: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LayerVisibility from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
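A minimal sketch (not part of the generated client) of the regenerated pydantic-v2 helpers, using the LayerVisibility model from this hunk; the JSON payload is only an example value.

    from geoengine_openapi_client.models.layer_visibility import LayerVisibility

    # from_json/from_dict now go through model_validate instead of parse_obj
    vis = LayerVisibility.from_json('{"data": true, "legend": false}')
    assert vis.to_dict() == {"data": True, "legend": False}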
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> LayerVisibility: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LayerVisibility from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LayerVisibility.parse_obj(obj) + return cls.model_validate(obj) - _obj = LayerVisibility.parse_obj({ + _obj = cls.model_validate({ "data": obj.get("data"), "legend": obj.get("legend") }) diff --git a/python/geoengine_openapi_client/models/line_symbology.py b/python/geoengine_openapi_client/models/line_symbology.py index cc72c137..618681ea 100644 --- a/python/geoengine_openapi_client/models/line_symbology.py +++ b/python/geoengine_openapi_client/models/line_symbology.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictBool, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.stroke_param import StrokeParam from geoengine_openapi_client.models.text_symbology import TextSymbology +from typing import Optional, Set +from typing_extensions import Self class LineSymbology(BaseModel): """ LineSymbology - """ - auto_simplified: StrictBool = Field(..., alias="autoSimplified") - stroke: StrokeParam = Field(...) + """ # noqa: E501 + auto_simplified: StrictBool = Field(alias="autoSimplified") + stroke: StrokeParam text: Optional[TextSymbology] = None - type: StrictStr = Field(...) - __properties = ["autoSimplified", "stroke", "text", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["autoSimplified", "stroke", "text", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('line'): + if value not in set(['line']): raise ValueError("must be one of enum values ('line')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LineSymbology: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LineSymbology from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of stroke if self.stroke: _dict['stroke'] = self.stroke.to_dict() @@ -72,25 +88,25 @@ def to_dict(self): if self.text: _dict['text'] = self.text.to_dict() # set to None if text (nullable) is None - # and __fields_set__ contains the field - if self.text is None and "text" in self.__fields_set__: + # and model_fields_set contains the field + if self.text is None and "text" in self.model_fields_set: _dict['text'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> LineSymbology: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LineSymbology from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LineSymbology.parse_obj(obj) + return cls.model_validate(obj) - _obj = LineSymbology.parse_obj({ - "auto_simplified": obj.get("autoSimplified"), - "stroke": StrokeParam.from_dict(obj.get("stroke")) if obj.get("stroke") is not None else None, - "text": TextSymbology.from_dict(obj.get("text")) if obj.get("text") is not None else None, + _obj = cls.model_validate({ + "autoSimplified": obj.get("autoSimplified"), + "stroke": StrokeParam.from_dict(obj["stroke"]) if obj.get("stroke") is not None else None, + "text": TextSymbology.from_dict(obj["text"]) if obj.get("text") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/linear_gradient.py b/python/geoengine_openapi_client/models/linear_gradient.py index 69fb7431..2578d6d6 100644 --- a/python/geoengine_openapi_client/models/linear_gradient.py +++ b/python/geoengine_openapi_client/models/linear_gradient.py @@ -18,77 +18,94 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictInt, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.breakpoint import Breakpoint +from typing import Optional, Set +from typing_extensions import Self class LinearGradient(BaseModel): """ LinearGradient - """ - breakpoints: conlist(Breakpoint) = Field(...) - no_data_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="noDataColor") - over_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="overColor") - type: StrictStr = Field(...) 
- under_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="underColor") - __properties = ["breakpoints", "noDataColor", "overColor", "type", "underColor"] - - @validator('type') + """ # noqa: E501 + breakpoints: List[Breakpoint] + no_data_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="noDataColor") + over_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="overColor") + type: StrictStr + under_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="underColor") + __properties: ClassVar[List[str]] = ["breakpoints", "noDataColor", "overColor", "type", "underColor"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('linearGradient', 'logarithmicGradient', 'palette'): - raise ValueError("must be one of enum values ('linearGradient', 'logarithmicGradient', 'palette')") + if value not in set(['linearGradient']): + raise ValueError("must be one of enum values ('linearGradient')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LinearGradient: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LinearGradient from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
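A minimal sketch (not part of the generated file) showing construction of LinearGradient via the camelCase aliases kept by populate_by_name, and the nested Breakpoint serialization performed in the to_dict override below; `bp` is an assumed, already valid Breakpoint instance.

    gradient = LinearGradient(
        breakpoints=[bp],                  # bp: existing Breakpoint instance (assumed)
        noDataColor=[0, 0, 0, 0],
        overColor=[255, 255, 255, 255],
        underColor=[0, 0, 0, 255],
        type="linearGradient",
    )
    d = gradient.to_dict()
    # d["breakpoints"][0] was produced by Breakpoint.to_dict(), not by model_dump()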
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in breakpoints (list) _items = [] if self.breakpoints: - for _item in self.breakpoints: - if _item: - _items.append(_item.to_dict()) + for _item_breakpoints in self.breakpoints: + if _item_breakpoints: + _items.append(_item_breakpoints.to_dict()) _dict['breakpoints'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> LinearGradient: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LinearGradient from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LinearGradient.parse_obj(obj) + return cls.model_validate(obj) - _obj = LinearGradient.parse_obj({ - "breakpoints": [Breakpoint.from_dict(_item) for _item in obj.get("breakpoints")] if obj.get("breakpoints") is not None else None, - "no_data_color": obj.get("noDataColor"), - "over_color": obj.get("overColor"), + _obj = cls.model_validate({ + "breakpoints": [Breakpoint.from_dict(_item) for _item in obj["breakpoints"]] if obj.get("breakpoints") is not None else None, + "noDataColor": obj.get("noDataColor"), + "overColor": obj.get("overColor"), "type": obj.get("type"), - "under_color": obj.get("underColor") + "underColor": obj.get("underColor") }) return _obj diff --git a/python/geoengine_openapi_client/models/logarithmic_gradient.py b/python/geoengine_openapi_client/models/logarithmic_gradient.py index cb4737eb..98a479a5 100644 --- a/python/geoengine_openapi_client/models/logarithmic_gradient.py +++ b/python/geoengine_openapi_client/models/logarithmic_gradient.py @@ -18,77 +18,94 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictInt, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.breakpoint import Breakpoint +from typing import Optional, Set +from typing_extensions import Self class LogarithmicGradient(BaseModel): """ LogarithmicGradient - """ - breakpoints: conlist(Breakpoint) = Field(...) - no_data_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="noDataColor") - over_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="overColor") - type: StrictStr = Field(...) 
- under_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="underColor") - __properties = ["breakpoints", "noDataColor", "overColor", "type", "underColor"] - - @validator('type') + """ # noqa: E501 + breakpoints: List[Breakpoint] + no_data_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="noDataColor") + over_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="overColor") + type: StrictStr + under_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="underColor") + __properties: ClassVar[List[str]] = ["breakpoints", "noDataColor", "overColor", "type", "underColor"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('logarithmicGradient'): + if value not in set(['logarithmicGradient']): raise ValueError("must be one of enum values ('logarithmicGradient')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> LogarithmicGradient: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of LogarithmicGradient from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in breakpoints (list) _items = [] if self.breakpoints: - for _item in self.breakpoints: - if _item: - _items.append(_item.to_dict()) + for _item_breakpoints in self.breakpoints: + if _item_breakpoints: + _items.append(_item_breakpoints.to_dict()) _dict['breakpoints'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> LogarithmicGradient: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of LogarithmicGradient from a dict""" if obj is None: return None if not isinstance(obj, dict): - return LogarithmicGradient.parse_obj(obj) + return cls.model_validate(obj) - _obj = LogarithmicGradient.parse_obj({ - "breakpoints": [Breakpoint.from_dict(_item) for _item in obj.get("breakpoints")] if obj.get("breakpoints") is not None else None, - "no_data_color": obj.get("noDataColor"), - "over_color": obj.get("overColor"), + _obj = cls.model_validate({ + "breakpoints": [Breakpoint.from_dict(_item) for _item in obj["breakpoints"]] if obj.get("breakpoints") is not None else None, + "noDataColor": obj.get("noDataColor"), + "overColor": obj.get("overColor"), "type": obj.get("type"), - "under_color": obj.get("underColor") + "underColor": obj.get("underColor") }) return _obj diff --git a/python/geoengine_openapi_client/models/measurement.py b/python/geoengine_openapi_client/models/measurement.py index e2dfdbf9..919adba2 100644 --- a/python/geoengine_openapi_client/models/measurement.py +++ b/python/geoengine_openapi_client/models/measurement.py @@ -14,18 +14,16 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.classification_measurement import ClassificationMeasurement from geoengine_openapi_client.models.continuous_measurement import ContinuousMeasurement from geoengine_openapi_client.models.unitless_measurement import UnitlessMeasurement -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self MEASUREMENT_ONE_OF_SCHEMAS = ["ClassificationMeasurement", "ContinuousMeasurement", "UnitlessMeasurement"] @@ -39,16 +37,16 @@ class Measurement(BaseModel): oneof_schema_2_validator: Optional[ContinuousMeasurement] = None # data type: ClassificationMeasurement oneof_schema_3_validator: Optional[ClassificationMeasurement] = None - if TYPE_CHECKING: - actual_instance: Union[ClassificationMeasurement, ContinuousMeasurement, UnitlessMeasurement] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(MEASUREMENT_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[ClassificationMeasurement, ContinuousMeasurement, UnitlessMeasurement]] = None + one_of_schemas: Set[str] = { "ClassificationMeasurement", "ContinuousMeasurement", "UnitlessMeasurement" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def 
__init__(self, *args, **kwargs) -> None: @@ -61,9 +59,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = Measurement.construct() + instance = Measurement.model_construct() error_messages = [] match = 0 # validate data type: UnitlessMeasurement @@ -91,13 +89,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> Measurement: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> Measurement: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = Measurement.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -107,32 +105,32 @@ def from_json(cls, json_str: str) -> Measurement: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `ClassificationMeasurement` - if _data_type == "ClassificationMeasurement": + if _data_type == "classification": instance.actual_instance = ClassificationMeasurement.from_json(json_str) return instance # check if data type is `ContinuousMeasurement` - if _data_type == "ContinuousMeasurement": + if _data_type == "continuous": instance.actual_instance = ContinuousMeasurement.from_json(json_str) return instance # check if data type is `UnitlessMeasurement` - if _data_type == "UnitlessMeasurement": + if _data_type == "unitless": instance.actual_instance = UnitlessMeasurement.from_json(json_str) return instance # check if data type is `ClassificationMeasurement` - if _data_type == "classification": + if _data_type == "ClassificationMeasurement": instance.actual_instance = ClassificationMeasurement.from_json(json_str) return instance # check if data type is `ContinuousMeasurement` - if _data_type == "continuous": + if _data_type == "ContinuousMeasurement": instance.actual_instance = ContinuousMeasurement.from_json(json_str) return instance # check if data type is `UnitlessMeasurement` - if _data_type == "unitless": + if _data_type == "UnitlessMeasurement": instance.actual_instance = UnitlessMeasurement.from_json(json_str) return instance @@ -169,19 +167,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], ClassificationMeasurement, ContinuousMeasurement, UnitlessMeasurement]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -189,6 +185,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/meta_data_definition.py b/python/geoengine_openapi_client/models/meta_data_definition.py 
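A minimal sketch (not part of the generated client) for the Measurement oneOf wrapper above: from_json() dispatches on the lowercase `type` discriminator and stores the concrete model in actual_instance. The `payload` value is an assumption and must satisfy the schema of the matching variant.

    # payload: JSON for one of the variants, e.g. '{"type": "continuous", ...}'
    # (the remaining keys must match ContinuousMeasurement; assumed here)
    m = Measurement.from_json(payload)
    m.actual_instance   # ClassificationMeasurement | ContinuousMeasurement | UnitlessMeasurement
    m.to_dict()         # delegates to the wrapped instance's to_dict()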
index 46ecccae..f1c70bc4 100644 --- a/python/geoengine_openapi_client/models/meta_data_definition.py +++ b/python/geoengine_openapi_client/models/meta_data_definition.py @@ -14,32 +14,29 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.gdal_meta_data_list import GdalMetaDataList from geoengine_openapi_client.models.gdal_meta_data_regular import GdalMetaDataRegular from geoengine_openapi_client.models.gdal_meta_data_static import GdalMetaDataStatic from geoengine_openapi_client.models.gdal_metadata_net_cdf_cf import GdalMetadataNetCdfCf -from geoengine_openapi_client.models.mock_meta_data import MockMetaData -from geoengine_openapi_client.models.ogr_meta_data import OgrMetaData -from typing import Union, Any, List, TYPE_CHECKING +from geoengine_openapi_client.models.static_meta_data import StaticMetaData from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self -METADATADEFINITION_ONE_OF_SCHEMAS = ["GdalMetaDataList", "GdalMetaDataRegular", "GdalMetaDataStatic", "GdalMetadataNetCdfCf", "MockMetaData", "OgrMetaData"] +METADATADEFINITION_ONE_OF_SCHEMAS = ["GdalMetaDataList", "GdalMetaDataRegular", "GdalMetaDataStatic", "GdalMetadataNetCdfCf", "StaticMetaData"] class MetaDataDefinition(BaseModel): """ MetaDataDefinition """ - # data type: MockMetaData - oneof_schema_1_validator: Optional[MockMetaData] = None - # data type: OgrMetaData - oneof_schema_2_validator: Optional[OgrMetaData] = None + # data type: StaticMetaData + oneof_schema_1_validator: Optional[StaticMetaData] = None + # data type: StaticMetaData + oneof_schema_2_validator: Optional[StaticMetaData] = None # data type: GdalMetaDataRegular oneof_schema_3_validator: Optional[GdalMetaDataRegular] = None # data type: GdalMetaDataStatic @@ -48,16 +45,16 @@ class MetaDataDefinition(BaseModel): oneof_schema_5_validator: Optional[GdalMetadataNetCdfCf] = None # data type: GdalMetaDataList oneof_schema_6_validator: Optional[GdalMetaDataList] = None - if TYPE_CHECKING: - actual_instance: Union[GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(METADATADEFINITION_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData]] = None + one_of_schemas: Set[str] = { "GdalMetaDataList", "GdalMetaDataRegular", "GdalMetaDataStatic", "GdalMetadataNetCdfCf", "StaticMetaData" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -70,19 +67,19 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = MetaDataDefinition.construct() + instance = MetaDataDefinition.model_construct() error_messages = [] match = 0 - # validate data type: MockMetaData - if not isinstance(v, 
MockMetaData): - error_messages.append(f"Error! Input type `{type(v)}` is not `MockMetaData`") + # validate data type: StaticMetaData + if not isinstance(v, StaticMetaData): + error_messages.append(f"Error! Input type `{type(v)}` is not `StaticMetaData`") else: match += 1 - # validate data type: OgrMetaData - if not isinstance(v, OgrMetaData): - error_messages.append(f"Error! Input type `{type(v)}` is not `OgrMetaData`") + # validate data type: StaticMetaData + if not isinstance(v, StaticMetaData): + error_messages.append(f"Error! Input type `{type(v)}` is not `StaticMetaData`") else: match += 1 # validate data type: GdalMetaDataRegular @@ -107,21 +104,21 @@ def actual_instance_must_validate_oneof(cls, v): match += 1 if match > 1: # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData. Details: " + ", ".join(error_messages)) + raise ValueError("Multiple matches found when setting `actual_instance` in MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData. Details: " + ", ".join(error_messages)) elif match == 0: # no match - raise ValueError("No match found when setting `actual_instance` in MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData. Details: " + ", ".join(error_messages)) + raise ValueError("No match found when setting `actual_instance` in MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData. Details: " + ", ".join(error_messages)) else: return v @classmethod - def from_dict(cls, obj: dict) -> MetaDataDefinition: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> MetaDataDefinition: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = MetaDataDefinition.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -140,11 +137,6 @@ def from_json(cls, json_str: str) -> MetaDataDefinition: instance.actual_instance = GdalMetaDataRegular.from_json(json_str) return instance - # check if data type is `GdalMetaDataStatic` - if _data_type == "GdalMetaDataStatic": - instance.actual_instance = GdalMetaDataStatic.from_json(json_str) - return instance - # check if data type is `GdalMetadataNetCdfCf` if _data_type == "GdalMetadataNetCdfCf": instance.actual_instance = GdalMetadataNetCdfCf.from_json(json_str) @@ -155,25 +147,35 @@ def from_json(cls, json_str: str) -> MetaDataDefinition: instance.actual_instance = GdalMetaDataStatic.from_json(json_str) return instance - # check if data type is `MockMetaData` + # check if data type is `StaticMetaData` if _data_type == "MockMetaData": - instance.actual_instance = MockMetaData.from_json(json_str) + instance.actual_instance = StaticMetaData.from_json(json_str) return instance - # check if data type is `OgrMetaData` + # check if data type is `StaticMetaData` if _data_type == "OgrMetaData": - instance.actual_instance = OgrMetaData.from_json(json_str) + instance.actual_instance = StaticMetaData.from_json(json_str) + return instance + + # check if data type is `GdalMetaDataStatic` + if _data_type == "GdalMetaDataStatic": + 
instance.actual_instance = GdalMetaDataStatic.from_json(json_str) + return instance + + # check if data type is `StaticMetaData` + if _data_type == "StaticMetaData": + instance.actual_instance = StaticMetaData.from_json(json_str) return instance - # deserialize data into MockMetaData + # deserialize data into StaticMetaData try: - instance.actual_instance = MockMetaData.from_json(json_str) + instance.actual_instance = StaticMetaData.from_json(json_str) match += 1 except (ValidationError, ValueError) as e: error_messages.append(str(e)) - # deserialize data into OgrMetaData + # deserialize data into StaticMetaData try: - instance.actual_instance = OgrMetaData.from_json(json_str) + instance.actual_instance = StaticMetaData.from_json(json_str) match += 1 except (ValidationError, ValueError) as e: error_messages.append(str(e)) @@ -204,10 +206,10 @@ def from_json(cls, json_str: str) -> MetaDataDefinition: if match > 1: # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData. Details: " + ", ".join(error_messages)) + raise ValueError("Multiple matches found when deserializing the JSON string into MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData. Details: " + ", ".join(error_messages)) elif match == 0: # no match - raise ValueError("No match found when deserializing the JSON string into MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData. Details: " + ", ".join(error_messages)) + raise ValueError("No match found when deserializing the JSON string into MetaDataDefinition with oneOf schemas: GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData. 
Details: " + ", ".join(error_messages)) else: return instance @@ -216,19 +218,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, StaticMetaData]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -236,6 +236,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/meta_data_suggestion.py b/python/geoengine_openapi_client/models/meta_data_suggestion.py index e75453ba..14a5d587 100644 --- a/python/geoengine_openapi_client/models/meta_data_suggestion.py +++ b/python/geoengine_openapi_client/models/meta_data_suggestion.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.meta_data_definition import MetaDataDefinition +from typing import Optional, Set +from typing_extensions import Self class MetaDataSuggestion(BaseModel): """ MetaDataSuggestion - """ - layer_name: StrictStr = Field(..., alias="layerName") - main_file: StrictStr = Field(..., alias="mainFile") - meta_data: MetaDataDefinition = Field(..., alias="metaData") - __properties = ["layerName", "mainFile", "metaData"] + """ # noqa: E501 + layer_name: StrictStr = Field(alias="layerName") + main_file: StrictStr = Field(alias="mainFile") + meta_data: MetaDataDefinition = Field(alias="metaData") + __properties: ClassVar[List[str]] = ["layerName", "mainFile", "metaData"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MetaDataSuggestion: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MetaDataSuggestion from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of meta_data if self.meta_data: _dict['metaData'] = self.meta_data.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> MetaDataSuggestion: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MetaDataSuggestion from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MetaDataSuggestion.parse_obj(obj) + return cls.model_validate(obj) - _obj = MetaDataSuggestion.parse_obj({ - "layer_name": obj.get("layerName"), - "main_file": obj.get("mainFile"), - "meta_data": MetaDataDefinition.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None + _obj = cls.model_validate({ + "layerName": obj.get("layerName"), + "mainFile": obj.get("mainFile"), + "metaData": MetaDataDefinition.from_dict(obj["metaData"]) if obj.get("metaData") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/ml_model.py b/python/geoengine_openapi_client/models/ml_model.py index f4b3093d..4e9068fb 100644 --- a/python/geoengine_openapi_client/models/ml_model.py +++ b/python/geoengine_openapi_client/models/ml_model.py @@ -18,64 +18,80 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.ml_model_metadata import MlModelMetadata +from typing import Optional, Set +from typing_extensions import Self class MlModel(BaseModel): """ MlModel - """ - description: StrictStr = Field(...) - display_name: StrictStr = Field(..., alias="displayName") - metadata: MlModelMetadata = Field(...) - name: StrictStr = Field(...) - upload: StrictStr = Field(...) 
- __properties = ["description", "displayName", "metadata", "name", "upload"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + description: StrictStr + display_name: StrictStr = Field(alias="displayName") + metadata: MlModelMetadata + name: StrictStr + upload: StrictStr + __properties: ClassVar[List[str]] = ["description", "displayName", "metadata", "name", "upload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MlModel: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MlModel from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of metadata if self.metadata: _dict['metadata'] = self.metadata.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> MlModel: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MlModel from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MlModel.parse_obj(obj) + return cls.model_validate(obj) - _obj = MlModel.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "display_name": obj.get("displayName"), - "metadata": MlModelMetadata.from_dict(obj.get("metadata")) if obj.get("metadata") is not None else None, + "displayName": obj.get("displayName"), + "metadata": MlModelMetadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None, "name": obj.get("name"), "upload": obj.get("upload") }) diff --git a/python/geoengine_openapi_client/models/ml_model_metadata.py b/python/geoengine_openapi_client/models/ml_model_metadata.py index 308a5052..7587f379 100644 --- a/python/geoengine_openapi_client/models/ml_model_metadata.py +++ b/python/geoengine_openapi_client/models/ml_model_metadata.py @@ -18,61 +18,78 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.raster_data_type import RasterDataType +from typing import Optional, Set +from typing_extensions import Self class MlModelMetadata(BaseModel): """ MlModelMetadata - """ - file_name: 
StrictStr = Field(..., alias="fileName") - input_type: RasterDataType = Field(..., alias="inputType") - num_input_bands: conint(strict=True, ge=0) = Field(..., alias="numInputBands") - output_type: RasterDataType = Field(..., alias="outputType") - __properties = ["fileName", "inputType", "numInputBands", "outputType"] + """ # noqa: E501 + file_name: StrictStr = Field(alias="fileName") + input_type: RasterDataType = Field(alias="inputType") + num_input_bands: Annotated[int, Field(strict=True, ge=0)] = Field(alias="numInputBands") + output_type: RasterDataType = Field(alias="outputType") + __properties: ClassVar[List[str]] = ["fileName", "inputType", "numInputBands", "outputType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MlModelMetadata: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MlModelMetadata from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
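A minimal sketch (not part of the generated file): from_dict() now expects the camelCase alias keys used by the API, as in the regenerated mapping further down in this hunk. The file name is only an example value, and `raster_type` stands in for a valid RasterDataType member (both assumptions).

    meta = MlModelMetadata.from_dict({
        "fileName": "model.onnx",       # example value only
        "inputType": raster_type,       # raster_type: a RasterDataType member (assumed)
        "numInputBands": 4,
        "outputType": raster_type,
    })
    meta.to_dict()["numInputBands"]     # aliases are also used on serialization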
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> MlModelMetadata: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MlModelMetadata from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MlModelMetadata.parse_obj(obj) + return cls.model_validate(obj) - _obj = MlModelMetadata.parse_obj({ - "file_name": obj.get("fileName"), - "input_type": obj.get("inputType"), - "num_input_bands": obj.get("numInputBands"), - "output_type": obj.get("outputType") + _obj = cls.model_validate({ + "fileName": obj.get("fileName"), + "inputType": obj.get("inputType"), + "numInputBands": obj.get("numInputBands"), + "outputType": obj.get("outputType") }) return _obj diff --git a/python/geoengine_openapi_client/models/ml_model_name_response.py b/python/geoengine_openapi_client/models/ml_model_name_response.py index 87b46c69..a9746c50 100644 --- a/python/geoengine_openapi_client/models/ml_model_name_response.py +++ b/python/geoengine_openapi_client/models/ml_model_name_response.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class MlModelNameResponse(BaseModel): """ MlModelNameResponse - """ - ml_model_name: StrictStr = Field(..., alias="mlModelName") - __properties = ["mlModelName"] + """ # noqa: E501 + ml_model_name: StrictStr = Field(alias="mlModelName") + __properties: ClassVar[List[str]] = ["mlModelName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MlModelNameResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MlModelNameResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> MlModelNameResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MlModelNameResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MlModelNameResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = MlModelNameResponse.parse_obj({ - "ml_model_name": obj.get("mlModelName") + _obj = cls.model_validate({ + "mlModelName": obj.get("mlModelName") }) return _obj diff --git a/python/geoengine_openapi_client/models/ml_model_resource.py b/python/geoengine_openapi_client/models/ml_model_resource.py index e5072648..bd6eccfd 100644 --- a/python/geoengine_openapi_client/models/ml_model_resource.py +++ b/python/geoengine_openapi_client/models/ml_model_resource.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class MlModelResource(BaseModel): """ MlModelResource - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) - __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('mlModel'): + if value not in set(['mlModel']): raise ValueError("must be one of enum values ('mlModel')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MlModelResource: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MlModelResource from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> MlModelResource: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MlModelResource from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MlModelResource.parse_obj(obj) + return cls.model_validate(obj) - _obj = MlModelResource.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/mock_dataset_data_source_loading_info.py b/python/geoengine_openapi_client/models/mock_dataset_data_source_loading_info.py index b6763f75..924098dd 100644 --- a/python/geoengine_openapi_client/models/mock_dataset_data_source_loading_info.py +++ b/python/geoengine_openapi_client/models/mock_dataset_data_source_loading_info.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class MockDatasetDataSourceLoadingInfo(BaseModel): """ MockDatasetDataSourceLoadingInfo - """ - points: conlist(Coordinate2D) = Field(...) - __properties = ["points"] + """ # noqa: E501 + points: List[Coordinate2D] + __properties: ClassVar[List[str]] = ["points"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MockDatasetDataSourceLoadingInfo: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MockDatasetDataSourceLoadingInfo from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in points (list) _items = [] if self.points: - for _item in self.points: - if _item: - _items.append(_item.to_dict()) + for _item_points in self.points: + if _item_points: + _items.append(_item_points.to_dict()) _dict['points'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> MockDatasetDataSourceLoadingInfo: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MockDatasetDataSourceLoadingInfo from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MockDatasetDataSourceLoadingInfo.parse_obj(obj) + return cls.model_validate(obj) - _obj = MockDatasetDataSourceLoadingInfo.parse_obj({ - "points": [Coordinate2D.from_dict(_item) for _item in obj.get("points")] if obj.get("points") is not None else None + _obj = cls.model_validate({ + "points": [Coordinate2D.from_dict(_item) for _item in obj["points"]] if obj.get("points") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/multi_band_raster_colorizer.py b/python/geoengine_openapi_client/models/multi_band_raster_colorizer.py index 873c4f3f..cc332b03 100644 --- a/python/geoengine_openapi_client/models/multi_band_raster_colorizer.py +++ b/python/geoengine_openapi_client/models/multi_band_raster_colorizer.py @@ -18,86 +18,103 @@ import re # noqa: F401 import json - -from typing import List, Optional, Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, conint, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class MultiBandRasterColorizer(BaseModel): """ MultiBandRasterColorizer - """ - blue_band: conint(strict=True, ge=0) = Field(..., alias="blueBand", description="The band index of the blue channel.") - blue_max: Union[StrictFloat, StrictInt] = Field(..., alias="blueMax", description="The maximum value for the red channel.") - blue_min: Union[StrictFloat, StrictInt] = Field(..., alias="blueMin", description="The minimum value for the red channel.") - blue_scale: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="blueScale", description="A scaling factor for the blue channel between 0 and 1.") - green_band: conint(strict=True, ge=0) = Field(..., alias="greenBand", description="The band index of the green channel.") - green_max: Union[StrictFloat, StrictInt] = Field(..., alias="greenMax", description="The maximum value for the red channel.") - green_min: Union[StrictFloat, StrictInt] = Field(..., alias="greenMin", description="The minimum value for the red channel.") - green_scale: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="greenScale", description="A scaling factor for the green channel between 0 and 1.") - no_data_color: Optional[conlist(StrictInt, max_items=4, min_items=4)] = Field(None, alias="noDataColor") - red_band: conint(strict=True, ge=0) = Field(..., alias="redBand", description="The band index of the red channel.") - red_max: Union[StrictFloat, StrictInt] = Field(..., alias="redMax", description="The maximum value for the red channel.") - red_min: Union[StrictFloat, StrictInt] = Field(..., 
alias="redMin", description="The minimum value for the red channel.") - red_scale: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="redScale", description="A scaling factor for the red channel between 0 and 1.") - type: StrictStr = Field(...) - __properties = ["blueBand", "blueMax", "blueMin", "blueScale", "greenBand", "greenMax", "greenMin", "greenScale", "noDataColor", "redBand", "redMax", "redMin", "redScale", "type"] - - @validator('type') + """ # noqa: E501 + blue_band: Annotated[int, Field(strict=True, ge=0)] = Field(description="The band index of the blue channel.", alias="blueBand") + blue_max: Union[StrictFloat, StrictInt] = Field(description="The maximum value for the red channel.", alias="blueMax") + blue_min: Union[StrictFloat, StrictInt] = Field(description="The minimum value for the red channel.", alias="blueMin") + blue_scale: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, description="A scaling factor for the blue channel between 0 and 1.", alias="blueScale") + green_band: Annotated[int, Field(strict=True, ge=0)] = Field(description="The band index of the green channel.", alias="greenBand") + green_max: Union[StrictFloat, StrictInt] = Field(description="The maximum value for the red channel.", alias="greenMax") + green_min: Union[StrictFloat, StrictInt] = Field(description="The minimum value for the red channel.", alias="greenMin") + green_scale: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, description="A scaling factor for the green channel between 0 and 1.", alias="greenScale") + no_data_color: Optional[Annotated[List[StrictInt], Field(min_length=4, max_length=4)]] = Field(default=None, alias="noDataColor") + red_band: Annotated[int, Field(strict=True, ge=0)] = Field(description="The band index of the red channel.", alias="redBand") + red_max: Union[StrictFloat, StrictInt] = Field(description="The maximum value for the red channel.", alias="redMax") + red_min: Union[StrictFloat, StrictInt] = Field(description="The minimum value for the red channel.", alias="redMin") + red_scale: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, description="A scaling factor for the red channel between 0 and 1.", alias="redScale") + type: StrictStr + __properties: ClassVar[List[str]] = ["blueBand", "blueMax", "blueMin", "blueScale", "greenBand", "greenMax", "greenMin", "greenScale", "noDataColor", "redBand", "redMax", "redMin", "redScale", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('multiBand'): + if value not in set(['multiBand']): raise ValueError("must be one of enum values ('multiBand')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MultiBandRasterColorizer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MultiBandRasterColorizer from a JSON string""" return 
cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> MultiBandRasterColorizer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MultiBandRasterColorizer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MultiBandRasterColorizer.parse_obj(obj) - - _obj = MultiBandRasterColorizer.parse_obj({ - "blue_band": obj.get("blueBand"), - "blue_max": obj.get("blueMax"), - "blue_min": obj.get("blueMin"), - "blue_scale": obj.get("blueScale"), - "green_band": obj.get("greenBand"), - "green_max": obj.get("greenMax"), - "green_min": obj.get("greenMin"), - "green_scale": obj.get("greenScale"), - "no_data_color": obj.get("noDataColor"), - "red_band": obj.get("redBand"), - "red_max": obj.get("redMax"), - "red_min": obj.get("redMin"), - "red_scale": obj.get("redScale"), + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "blueBand": obj.get("blueBand"), + "blueMax": obj.get("blueMax"), + "blueMin": obj.get("blueMin"), + "blueScale": obj.get("blueScale"), + "greenBand": obj.get("greenBand"), + "greenMax": obj.get("greenMax"), + "greenMin": obj.get("greenMin"), + "greenScale": obj.get("greenScale"), + "noDataColor": obj.get("noDataColor"), + "redBand": obj.get("redBand"), + "redMax": obj.get("redMax"), + "redMin": obj.get("redMin"), + "redScale": obj.get("redScale"), "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/multi_line_string.py b/python/geoengine_openapi_client/models/multi_line_string.py index 79686dde..d164f2ce 100644 --- a/python/geoengine_openapi_client/models/multi_line_string.py +++ b/python/geoengine_openapi_client/models/multi_line_string.py @@ -18,66 +18,82 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class MultiLineString(BaseModel): """ MultiLineString - """ - coordinates: conlist(conlist(Coordinate2D)) = Field(...) 
- __properties = ["coordinates"] + """ # noqa: E501 + coordinates: List[List[Coordinate2D]] + __properties: ClassVar[List[str]] = ["coordinates"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MultiLineString: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MultiLineString from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in coordinates (list of list) _items = [] if self.coordinates: - for _item in self.coordinates: - if _item: + for _item_coordinates in self.coordinates: + if _item_coordinates: _items.append( - [_inner_item.to_dict() for _inner_item in _item if _inner_item is not None] + [_inner_item.to_dict() for _inner_item in _item_coordinates if _inner_item is not None] ) _dict['coordinates'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> MultiLineString: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MultiLineString from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MultiLineString.parse_obj(obj) + return cls.model_validate(obj) - _obj = MultiLineString.parse_obj({ + _obj = cls.model_validate({ "coordinates": [ [Coordinate2D.from_dict(_inner_item) for _inner_item in _item] - for _item in obj.get("coordinates") + for _item in obj["coordinates"] ] if obj.get("coordinates") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/multi_point.py b/python/geoengine_openapi_client/models/multi_point.py index d16a104b..4d9c2b81 100644 --- a/python/geoengine_openapi_client/models/multi_point.py +++ b/python/geoengine_openapi_client/models/multi_point.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class MultiPoint(BaseModel): """ MultiPoint - """ - coordinates: conlist(Coordinate2D) = Field(...) 
- __properties = ["coordinates"] + """ # noqa: E501 + coordinates: List[Coordinate2D] + __properties: ClassVar[List[str]] = ["coordinates"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MultiPoint: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MultiPoint from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in coordinates (list) _items = [] if self.coordinates: - for _item in self.coordinates: - if _item: - _items.append(_item.to_dict()) + for _item_coordinates in self.coordinates: + if _item_coordinates: + _items.append(_item_coordinates.to_dict()) _dict['coordinates'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> MultiPoint: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MultiPoint from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MultiPoint.parse_obj(obj) + return cls.model_validate(obj) - _obj = MultiPoint.parse_obj({ - "coordinates": [Coordinate2D.from_dict(_item) for _item in obj.get("coordinates")] if obj.get("coordinates") is not None else None + _obj = cls.model_validate({ + "coordinates": [Coordinate2D.from_dict(_item) for _item in obj["coordinates"]] if obj.get("coordinates") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/multi_polygon.py b/python/geoengine_openapi_client/models/multi_polygon.py index 0d92d176..ce9105ca 100644 --- a/python/geoengine_openapi_client/models/multi_polygon.py +++ b/python/geoengine_openapi_client/models/multi_polygon.py @@ -18,66 +18,82 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class MultiPolygon(BaseModel): """ MultiPolygon - """ - polygons: conlist(conlist(conlist(Coordinate2D))) = Field(...) 
- __properties = ["polygons"] + """ # noqa: E501 + polygons: List[List[List[Coordinate2D]]] + __properties: ClassVar[List[str]] = ["polygons"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> MultiPolygon: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of MultiPolygon from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in polygons (list of list) _items = [] if self.polygons: - for _item in self.polygons: - if _item: + for _item_polygons in self.polygons: + if _item_polygons: _items.append( - [_inner_item.to_dict() for _inner_item in _item if _inner_item is not None] + [_inner_item.to_dict() for _inner_item in _item_polygons if _inner_item is not None] ) _dict['polygons'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> MultiPolygon: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of MultiPolygon from a dict""" if obj is None: return None if not isinstance(obj, dict): - return MultiPolygon.parse_obj(obj) + return cls.model_validate(obj) - _obj = MultiPolygon.parse_obj({ + _obj = cls.model_validate({ "polygons": [ [List[Coordinate2D].from_dict(_inner_item) for _inner_item in _item] - for _item in obj.get("polygons") + for _item in obj["polygons"] ] if obj.get("polygons") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/number_param.py b/python/geoengine_openapi_client/models/number_param.py index c68b0869..58875910 100644 --- a/python/geoengine_openapi_client/models/number_param.py +++ b/python/geoengine_openapi_client/models/number_param.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.derived_number import DerivedNumber from geoengine_openapi_client.models.static_number_param import StaticNumberParam -from typing import Union, Any, List, 
TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self NUMBERPARAM_ONE_OF_SCHEMAS = ["DerivedNumber", "StaticNumberParam"] @@ -36,16 +34,16 @@ class NumberParam(BaseModel): oneof_schema_1_validator: Optional[StaticNumberParam] = None # data type: DerivedNumber oneof_schema_2_validator: Optional[DerivedNumber] = None - if TYPE_CHECKING: - actual_instance: Union[DerivedNumber, StaticNumberParam] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(NUMBERPARAM_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[DerivedNumber, StaticNumberParam]] = None + one_of_schemas: Set[str] = { "DerivedNumber", "StaticNumberParam" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -58,9 +56,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = NumberParam.construct() + instance = NumberParam.model_construct() error_messages = [] match = 0 # validate data type: StaticNumberParam @@ -83,13 +81,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> NumberParam: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> NumberParam: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = NumberParam.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -99,22 +97,22 @@ def from_json(cls, json_str: str) -> NumberParam: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `DerivedNumber` - if _data_type == "DerivedNumber": + if _data_type == "derived": instance.actual_instance = DerivedNumber.from_json(json_str) return instance # check if data type is `StaticNumberParam` - if _data_type == "StaticNumberParam": + if _data_type == "static": instance.actual_instance = StaticNumberParam.from_json(json_str) return instance # check if data type is `DerivedNumber` - if _data_type == "derived": + if _data_type == "DerivedNumber": instance.actual_instance = DerivedNumber.from_json(json_str) return instance # check if data type is `StaticNumberParam` - if _data_type == "static": + if _data_type == "StaticNumberParam": instance.actual_instance = StaticNumberParam.from_json(json_str) return instance @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], DerivedNumber, StaticNumberParam]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return 
self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/ogr_source_column_spec.py b/python/geoengine_openapi_client/models/ogr_source_column_spec.py index 6c46d33a..c2c4f5e5 100644 --- a/python/geoengine_openapi_client/models/ogr_source_column_spec.py +++ b/python/geoengine_openapi_client/models/ogr_source_column_spec.py @@ -18,84 +18,95 @@ import re # noqa: F401 import json - -from typing import Dict, List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.format_specifics import FormatSpecifics +from typing import Optional, Set +from typing_extensions import Self class OgrSourceColumnSpec(BaseModel): """ OgrSourceColumnSpec - """ - bool: Optional[conlist(StrictStr)] = None - datetime: Optional[conlist(StrictStr)] = None - float: Optional[conlist(StrictStr)] = None - format_specifics: Optional[FormatSpecifics] = Field(None, alias="formatSpecifics") - int: Optional[conlist(StrictStr)] = None + """ # noqa: E501 + bool: Optional[List[StrictStr]] = None + datetime: Optional[List[StrictStr]] = None + var_float: Optional[List[StrictStr]] = Field(default=None, alias="float") + format_specifics: Optional[FormatSpecifics] = Field(default=None, alias="formatSpecifics") + int: Optional[List[StrictStr]] = None rename: Optional[Dict[str, StrictStr]] = None - text: Optional[conlist(StrictStr)] = None - x: StrictStr = Field(...) + text: Optional[List[StrictStr]] = None + x: StrictStr y: Optional[StrictStr] = None - __properties = ["bool", "datetime", "float", "formatSpecifics", "int", "rename", "text", "x", "y"] + __properties: ClassVar[List[str]] = ["bool", "datetime", "float", "formatSpecifics", "int", "rename", "text", "x", "y"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceColumnSpec: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceColumnSpec from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
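The oneOf wrapper classes such as NumberParam keep the same dispatch strategy after regeneration: from_json() reads the "type" discriminator ("static" or "derived" in this case) and validates the matching variant. A simplified, self-contained sketch of that pattern; the variant classes and their fields below are hypothetical stand-ins, not the real generated models:

import json
from typing import Union
from pydantic import BaseModel

class StaticVariant(BaseModel):   # hypothetical stand-in for a "static" schema
    type: str
    value: float

class DerivedVariant(BaseModel):  # hypothetical stand-in for a "derived" schema
    type: str
    attribute: str

def parse_one_of(json_str: str) -> Union[StaticVariant, DerivedVariant]:
    # mirrors the generated from_json(): look up the discriminator,
    # then validate against the matching schema
    data_type = json.loads(json_str).get("type")
    if data_type == "static":
        return StaticVariant.model_validate_json(json_str)
    if data_type == "derived":
        return DerivedVariant.model_validate_json(json_str)
    raise ValueError("Failed to lookup data type from the field `type` in the input.")

param = parse_one_of('{"type": "static", "value": 1.0}')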
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of format_specifics if self.format_specifics: _dict['formatSpecifics'] = self.format_specifics.to_dict() # set to None if format_specifics (nullable) is None - # and __fields_set__ contains the field - if self.format_specifics is None and "format_specifics" in self.__fields_set__: + # and model_fields_set contains the field + if self.format_specifics is None and "format_specifics" in self.model_fields_set: _dict['formatSpecifics'] = None - # set to None if rename (nullable) is None - # and __fields_set__ contains the field - if self.rename is None and "rename" in self.__fields_set__: - _dict['rename'] = None - # set to None if y (nullable) is None - # and __fields_set__ contains the field - if self.y is None and "y" in self.__fields_set__: + # and model_fields_set contains the field + if self.y is None and "y" in self.model_fields_set: _dict['y'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceColumnSpec: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceColumnSpec from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceColumnSpec.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceColumnSpec.parse_obj({ + _obj = cls.model_validate({ "bool": obj.get("bool"), "datetime": obj.get("datetime"), "float": obj.get("float"), - "format_specifics": FormatSpecifics.from_dict(obj.get("formatSpecifics")) if obj.get("formatSpecifics") is not None else None, + "formatSpecifics": FormatSpecifics.from_dict(obj["formatSpecifics"]) if obj.get("formatSpecifics") is not None else None, "int": obj.get("int"), "rename": obj.get("rename"), "text": obj.get("text"), diff --git a/python/geoengine_openapi_client/models/ogr_source_dataset.py b/python/geoengine_openapi_client/models/ogr_source_dataset.py index a951f582..b54dac6a 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset.py @@ -18,57 +18,74 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictBool, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.ogr_source_column_spec import OgrSourceColumnSpec from geoengine_openapi_client.models.ogr_source_dataset_time_type import OgrSourceDatasetTimeType from geoengine_openapi_client.models.ogr_source_error_spec import OgrSourceErrorSpec from geoengine_openapi_client.models.typed_geometry import TypedGeometry from geoengine_openapi_client.models.vector_data_type import VectorDataType +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDataset(BaseModel): """ OgrSourceDataset - """ - attribute_query: Optional[StrictStr] = Field(None, alias="attributeQuery") - cache_ttl: Optional[conint(strict=True, ge=0)] = Field(None, alias="cacheTtl") + """ # noqa: E501 + attribute_query: Optional[StrictStr] = Field(default=None, alias="attributeQuery") + cache_ttl: Optional[Annotated[int, Field(strict=True, ge=0)]] = Field(default=None, alias="cacheTtl") columns: Optional[OgrSourceColumnSpec] = None - data_type: Optional[VectorDataType] = 
Field(None, alias="dataType") - default_geometry: Optional[TypedGeometry] = Field(None, alias="defaultGeometry") - file_name: StrictStr = Field(..., alias="fileName") - force_ogr_spatial_filter: Optional[StrictBool] = Field(None, alias="forceOgrSpatialFilter") - force_ogr_time_filter: Optional[StrictBool] = Field(None, alias="forceOgrTimeFilter") - layer_name: StrictStr = Field(..., alias="layerName") - on_error: OgrSourceErrorSpec = Field(..., alias="onError") - sql_query: Optional[StrictStr] = Field(None, alias="sqlQuery") + data_type: Optional[VectorDataType] = Field(default=None, alias="dataType") + default_geometry: Optional[TypedGeometry] = Field(default=None, alias="defaultGeometry") + file_name: StrictStr = Field(alias="fileName") + force_ogr_spatial_filter: Optional[StrictBool] = Field(default=None, alias="forceOgrSpatialFilter") + force_ogr_time_filter: Optional[StrictBool] = Field(default=None, alias="forceOgrTimeFilter") + layer_name: StrictStr = Field(alias="layerName") + on_error: OgrSourceErrorSpec = Field(alias="onError") + sql_query: Optional[StrictStr] = Field(default=None, alias="sqlQuery") time: Optional[OgrSourceDatasetTimeType] = None - __properties = ["attributeQuery", "cacheTtl", "columns", "dataType", "defaultGeometry", "fileName", "forceOgrSpatialFilter", "forceOgrTimeFilter", "layerName", "onError", "sqlQuery", "time"] + __properties: ClassVar[List[str]] = ["attributeQuery", "cacheTtl", "columns", "dataType", "defaultGeometry", "fileName", "forceOgrSpatialFilter", "forceOgrTimeFilter", "layerName", "onError", "sqlQuery", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of columns if self.columns: _dict['columns'] = self.columns.to_dict() @@ -79,54 +96,54 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if attribute_query (nullable) is None - # and __fields_set__ contains the field - if self.attribute_query is None and "attribute_query" in self.__fields_set__: + # and model_fields_set contains the field + if self.attribute_query is None and "attribute_query" in self.model_fields_set: _dict['attributeQuery'] = None # set to None if columns (nullable) is None - # and __fields_set__ contains the field - if self.columns is None and "columns" in self.__fields_set__: + # and model_fields_set contains the field + if self.columns is None and "columns" in self.model_fields_set: _dict['columns'] = None # set to None if data_type (nullable) is None - # and __fields_set__ contains the field - if self.data_type is None and "data_type" in self.__fields_set__: + # and model_fields_set contains the field + if self.data_type is None and "data_type" in self.model_fields_set: _dict['dataType'] = None # set to None if default_geometry (nullable) is None - # and __fields_set__ contains the field - if self.default_geometry is None and "default_geometry" in self.__fields_set__: + # and model_fields_set contains the field + if self.default_geometry is None and "default_geometry" in self.model_fields_set: _dict['defaultGeometry'] = None # set to None if sql_query (nullable) is None - # and __fields_set__ contains the field - if self.sql_query is None and "sql_query" in self.__fields_set__: + # and model_fields_set contains the field + if self.sql_query is None and "sql_query" in self.model_fields_set: _dict['sqlQuery'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDataset.parse_obj(obj) - - _obj = OgrSourceDataset.parse_obj({ - "attribute_query": obj.get("attributeQuery"), - "cache_ttl": obj.get("cacheTtl"), - "columns": OgrSourceColumnSpec.from_dict(obj.get("columns")) if obj.get("columns") is not None else None, - "data_type": obj.get("dataType"), - "default_geometry": TypedGeometry.from_dict(obj.get("defaultGeometry")) if obj.get("defaultGeometry") is not None else None, - "file_name": obj.get("fileName"), - "force_ogr_spatial_filter": obj.get("forceOgrSpatialFilter"), - "force_ogr_time_filter": obj.get("forceOgrTimeFilter"), - "layer_name": obj.get("layerName"), - "on_error": obj.get("onError"), - "sql_query": obj.get("sqlQuery"), - "time": OgrSourceDatasetTimeType.from_dict(obj.get("time")) if obj.get("time") is not None else None + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "attributeQuery": obj.get("attributeQuery"), + "cacheTtl": obj.get("cacheTtl"), + "columns": OgrSourceColumnSpec.from_dict(obj["columns"]) if obj.get("columns") is not None else None, + "dataType": obj.get("dataType"), + "defaultGeometry": TypedGeometry.from_dict(obj["defaultGeometry"]) if obj.get("defaultGeometry") is not None else None, + "fileName": obj.get("fileName"), + "forceOgrSpatialFilter": obj.get("forceOgrSpatialFilter"), + "forceOgrTimeFilter": obj.get("forceOgrTimeFilter"), + 
"layerName": obj.get("layerName"), + "onError": obj.get("onError"), + "sqlQuery": obj.get("sqlQuery"), + "time": OgrSourceDatasetTimeType.from_dict(obj["time"]) if obj.get("time") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type.py b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type.py index 6521cce2..84f85813 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type.py @@ -14,19 +14,17 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.ogr_source_dataset_time_type_none import OgrSourceDatasetTimeTypeNone from geoengine_openapi_client.models.ogr_source_dataset_time_type_start import OgrSourceDatasetTimeTypeStart from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_duration import OgrSourceDatasetTimeTypeStartDuration from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_end import OgrSourceDatasetTimeTypeStartEnd -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self OGRSOURCEDATASETTIMETYPE_ONE_OF_SCHEMAS = ["OgrSourceDatasetTimeTypeNone", "OgrSourceDatasetTimeTypeStart", "OgrSourceDatasetTimeTypeStartDuration", "OgrSourceDatasetTimeTypeStartEnd"] @@ -42,16 +40,16 @@ class OgrSourceDatasetTimeType(BaseModel): oneof_schema_3_validator: Optional[OgrSourceDatasetTimeTypeStartEnd] = None # data type: OgrSourceDatasetTimeTypeStartDuration oneof_schema_4_validator: Optional[OgrSourceDatasetTimeTypeStartDuration] = None - if TYPE_CHECKING: - actual_instance: Union[OgrSourceDatasetTimeTypeNone, OgrSourceDatasetTimeTypeStart, OgrSourceDatasetTimeTypeStartDuration, OgrSourceDatasetTimeTypeStartEnd] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(OGRSOURCEDATASETTIMETYPE_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[OgrSourceDatasetTimeTypeNone, OgrSourceDatasetTimeTypeStart, OgrSourceDatasetTimeTypeStartDuration, OgrSourceDatasetTimeTypeStartEnd]] = None + one_of_schemas: Set[str] = { "OgrSourceDatasetTimeTypeNone", "OgrSourceDatasetTimeTypeStart", "OgrSourceDatasetTimeTypeStartDuration", "OgrSourceDatasetTimeTypeStartEnd" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -64,9 +62,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = OgrSourceDatasetTimeType.construct() + instance = OgrSourceDatasetTimeType.model_construct() error_messages = [] match = 0 # validate data type: OgrSourceDatasetTimeTypeNone @@ -99,13 +97,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDatasetTimeType: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: 
return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDatasetTimeType: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = OgrSourceDatasetTimeType.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -115,42 +113,42 @@ def from_json(cls, json_str: str) -> OgrSourceDatasetTimeType: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `OgrSourceDatasetTimeTypeNone` - if _data_type == "OgrSourceDatasetTimeTypeNone": + if _data_type == "none": instance.actual_instance = OgrSourceDatasetTimeTypeNone.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStart` - if _data_type == "OgrSourceDatasetTimeTypeStart": + if _data_type == "start": instance.actual_instance = OgrSourceDatasetTimeTypeStart.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStartDuration` - if _data_type == "OgrSourceDatasetTimeTypeStartDuration": + if _data_type == "start+duration": instance.actual_instance = OgrSourceDatasetTimeTypeStartDuration.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStartEnd` - if _data_type == "OgrSourceDatasetTimeTypeStartEnd": + if _data_type == "start+end": instance.actual_instance = OgrSourceDatasetTimeTypeStartEnd.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeNone` - if _data_type == "none": + if _data_type == "OgrSourceDatasetTimeTypeNone": instance.actual_instance = OgrSourceDatasetTimeTypeNone.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStart` - if _data_type == "start": + if _data_type == "OgrSourceDatasetTimeTypeStart": instance.actual_instance = OgrSourceDatasetTimeTypeStart.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStartDuration` - if _data_type == "start+duration": + if _data_type == "OgrSourceDatasetTimeTypeStartDuration": instance.actual_instance = OgrSourceDatasetTimeTypeStartDuration.from_json(json_str) return instance # check if data type is `OgrSourceDatasetTimeTypeStartEnd` - if _data_type == "start+end": + if _data_type == "OgrSourceDatasetTimeTypeStartEnd": instance.actual_instance = OgrSourceDatasetTimeTypeStartEnd.from_json(json_str) return instance @@ -193,19 +191,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], OgrSourceDatasetTimeTypeNone, OgrSourceDatasetTimeTypeStart, OgrSourceDatasetTimeTypeStartDuration, OgrSourceDatasetTimeTypeStartEnd]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -213,6 +209,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git 
a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_none.py b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_none.py index 776ed4ce..f03974c6 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_none.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_none.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDatasetTimeTypeNone(BaseModel): """ OgrSourceDatasetTimeTypeNone - """ - type: StrictStr = Field(...) - __properties = ["type"] + """ # noqa: E501 + type: StrictStr + __properties: ClassVar[List[str]] = ["type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('none', 'start', 'start+end', 'start+duration'): - raise ValueError("must be one of enum values ('none', 'start', 'start+end', 'start+duration')") + if value not in set(['none']): + raise ValueError("must be one of enum values ('none')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDatasetTimeTypeNone: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeNone from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
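The None-handling described in the to_dict() docstrings relies on pydantic v2's model_fields_set, the replacement for the __fields_set__ accesses removed above: a nullable field explicitly set to None is re-added to the output, while an unset one stays omitted. An illustrative sketch, again with a hypothetical field name:

from typing import Optional
from pydantic import BaseModel, Field

class ExampleColumns(BaseModel):
    y: Optional[str] = Field(default=None)

explicit = ExampleColumns(y=None)   # nullable field set to None at initialization
implicit = ExampleColumns()         # field never set

assert "y" in explicit.model_fields_set
assert "y" not in implicit.model_fields_set
# this is why the generated to_dict() re-inserts None only for fields found in
# model_fields_set after calling model_dump(..., exclude_none=True)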
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDatasetTimeTypeNone: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeNone from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDatasetTimeTypeNone.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDatasetTimeTypeNone.parse_obj({ + _obj = cls.model_validate({ "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start.py b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start.py index bd1ad3fa..7107255e 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.ogr_source_duration_spec import OgrSourceDurationSpec from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDatasetTimeTypeStart(BaseModel): """ OgrSourceDatasetTimeTypeStart - """ - duration: OgrSourceDurationSpec = Field(...) - start_field: StrictStr = Field(..., alias="startField") - start_format: OgrSourceTimeFormat = Field(..., alias="startFormat") - type: StrictStr = Field(...) - __properties = ["duration", "startField", "startFormat", "type"] - - @validator('type') + """ # noqa: E501 + duration: OgrSourceDurationSpec + start_field: StrictStr = Field(alias="startField") + start_format: OgrSourceTimeFormat = Field(alias="startFormat") + type: StrictStr + __properties: ClassVar[List[str]] = ["duration", "startField", "startFormat", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('start'): + if value not in set(['start']): raise ValueError("must be one of enum values ('start')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDatasetTimeTypeStart: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStart from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of duration if self.duration: _dict['duration'] = self.duration.to_dict() @@ -74,18 +90,18 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDatasetTimeTypeStart: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStart from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDatasetTimeTypeStart.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDatasetTimeTypeStart.parse_obj({ - "duration": OgrSourceDurationSpec.from_dict(obj.get("duration")) if obj.get("duration") is not None else None, - "start_field": obj.get("startField"), - "start_format": OgrSourceTimeFormat.from_dict(obj.get("startFormat")) if obj.get("startFormat") is not None else None, + _obj = cls.model_validate({ + "duration": OgrSourceDurationSpec.from_dict(obj["duration"]) if obj.get("duration") is not None else None, + "startField": obj.get("startField"), + "startFormat": OgrSourceTimeFormat.from_dict(obj["startFormat"]) if obj.get("startFormat") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_duration.py b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_duration.py index 687ffcd9..1add572a 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_duration.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_duration.py @@ -18,70 +18,86 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDatasetTimeTypeStartDuration(BaseModel): """ OgrSourceDatasetTimeTypeStartDuration - """ - duration_field: StrictStr = Field(..., alias="durationField") - start_field: StrictStr = Field(..., alias="startField") - start_format: OgrSourceTimeFormat = Field(..., alias="startFormat") - type: StrictStr = Field(...) 
- __properties = ["durationField", "startField", "startFormat", "type"] - - @validator('type') + """ # noqa: E501 + duration_field: StrictStr = Field(alias="durationField") + start_field: StrictStr = Field(alias="startField") + start_format: OgrSourceTimeFormat = Field(alias="startFormat") + type: StrictStr + __properties: ClassVar[List[str]] = ["durationField", "startField", "startFormat", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('start+duration'): + if value not in set(['start+duration']): raise ValueError("must be one of enum values ('start+duration')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDatasetTimeTypeStartDuration: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStartDuration from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of start_format if self.start_format: _dict['startFormat'] = self.start_format.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDatasetTimeTypeStartDuration: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStartDuration from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDatasetTimeTypeStartDuration.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDatasetTimeTypeStartDuration.parse_obj({ - "duration_field": obj.get("durationField"), - "start_field": obj.get("startField"), - "start_format": OgrSourceTimeFormat.from_dict(obj.get("startFormat")) if obj.get("startFormat") is not None else None, + _obj = cls.model_validate({ + "durationField": obj.get("durationField"), + "startField": obj.get("startField"), + "startFormat": OgrSourceTimeFormat.from_dict(obj["startFormat"]) if obj.get("startFormat") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_end.py b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_end.py index 3e803586..85dfa151 100644 --- a/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_end.py +++ b/python/geoengine_openapi_client/models/ogr_source_dataset_time_type_start_end.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDatasetTimeTypeStartEnd(BaseModel): """ OgrSourceDatasetTimeTypeStartEnd - """ - end_field: StrictStr = Field(..., alias="endField") - end_format: OgrSourceTimeFormat = Field(..., alias="endFormat") - start_field: StrictStr = Field(..., alias="startField") - start_format: OgrSourceTimeFormat = Field(..., alias="startFormat") - type: StrictStr = Field(...) 
- __properties = ["endField", "endFormat", "startField", "startFormat", "type"] - - @validator('type') + """ # noqa: E501 + end_field: StrictStr = Field(alias="endField") + end_format: OgrSourceTimeFormat = Field(alias="endFormat") + start_field: StrictStr = Field(alias="startField") + start_format: OgrSourceTimeFormat = Field(alias="startFormat") + type: StrictStr + __properties: ClassVar[List[str]] = ["endField", "endFormat", "startField", "startFormat", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('start+end'): + if value not in set(['start+end']): raise ValueError("must be one of enum values ('start+end')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDatasetTimeTypeStartEnd: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStartEnd from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of end_format if self.end_format: _dict['endFormat'] = self.end_format.to_dict() @@ -74,19 +90,19 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDatasetTimeTypeStartEnd: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDatasetTimeTypeStartEnd from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDatasetTimeTypeStartEnd.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDatasetTimeTypeStartEnd.parse_obj({ - "end_field": obj.get("endField"), - "end_format": OgrSourceTimeFormat.from_dict(obj.get("endFormat")) if obj.get("endFormat") is not None else None, - "start_field": obj.get("startField"), - "start_format": OgrSourceTimeFormat.from_dict(obj.get("startFormat")) if obj.get("startFormat") is not None else None, + _obj = cls.model_validate({ + "endField": obj.get("endField"), + "endFormat": OgrSourceTimeFormat.from_dict(obj["endFormat"]) if obj.get("endFormat") is not None else None, + "startField": obj.get("startField"), + "startFormat": OgrSourceTimeFormat.from_dict(obj["startFormat"]) if obj.get("startFormat") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_duration_spec.py b/python/geoengine_openapi_client/models/ogr_source_duration_spec.py index 782ba3f5..d101ac2e 100644 --- a/python/geoengine_openapi_client/models/ogr_source_duration_spec.py +++ b/python/geoengine_openapi_client/models/ogr_source_duration_spec.py @@ -14,18 +14,16 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.ogr_source_duration_spec_infinite import OgrSourceDurationSpecInfinite from geoengine_openapi_client.models.ogr_source_duration_spec_value import OgrSourceDurationSpecValue from geoengine_openapi_client.models.ogr_source_duration_spec_zero import OgrSourceDurationSpecZero -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self OGRSOURCEDURATIONSPEC_ONE_OF_SCHEMAS = ["OgrSourceDurationSpecInfinite", "OgrSourceDurationSpecValue", "OgrSourceDurationSpecZero"] @@ -39,16 +37,16 @@ class OgrSourceDurationSpec(BaseModel): oneof_schema_2_validator: Optional[OgrSourceDurationSpecZero] = None # data type: OgrSourceDurationSpecValue oneof_schema_3_validator: Optional[OgrSourceDurationSpecValue] = None - if TYPE_CHECKING: - actual_instance: Union[OgrSourceDurationSpecInfinite, OgrSourceDurationSpecValue, OgrSourceDurationSpecZero] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(OGRSOURCEDURATIONSPEC_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[OgrSourceDurationSpecInfinite, OgrSourceDurationSpecValue, OgrSourceDurationSpecZero]] = None + one_of_schemas: Set[str] = { "OgrSourceDurationSpecInfinite", "OgrSourceDurationSpecValue", "OgrSourceDurationSpecZero" } + + model_config = ConfigDict( + 
validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -61,9 +59,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = OgrSourceDurationSpec.construct() + instance = OgrSourceDurationSpec.model_construct() error_messages = [] match = 0 # validate data type: OgrSourceDurationSpecInfinite @@ -91,13 +89,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDurationSpec: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDurationSpec: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = OgrSourceDurationSpec.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -107,32 +105,32 @@ def from_json(cls, json_str: str) -> OgrSourceDurationSpec: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `OgrSourceDurationSpecInfinite` - if _data_type == "OgrSourceDurationSpecInfinite": + if _data_type == "infinite": instance.actual_instance = OgrSourceDurationSpecInfinite.from_json(json_str) return instance # check if data type is `OgrSourceDurationSpecValue` - if _data_type == "OgrSourceDurationSpecValue": + if _data_type == "value": instance.actual_instance = OgrSourceDurationSpecValue.from_json(json_str) return instance # check if data type is `OgrSourceDurationSpecZero` - if _data_type == "OgrSourceDurationSpecZero": + if _data_type == "zero": instance.actual_instance = OgrSourceDurationSpecZero.from_json(json_str) return instance # check if data type is `OgrSourceDurationSpecInfinite` - if _data_type == "infinite": + if _data_type == "OgrSourceDurationSpecInfinite": instance.actual_instance = OgrSourceDurationSpecInfinite.from_json(json_str) return instance # check if data type is `OgrSourceDurationSpecValue` - if _data_type == "value": + if _data_type == "OgrSourceDurationSpecValue": instance.actual_instance = OgrSourceDurationSpecValue.from_json(json_str) return instance # check if data type is `OgrSourceDurationSpecZero` - if _data_type == "zero": + if _data_type == "OgrSourceDurationSpecZero": instance.actual_instance = OgrSourceDurationSpecZero.from_json(json_str) return instance @@ -169,19 +167,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], OgrSourceDurationSpecInfinite, OgrSourceDurationSpecValue, OgrSourceDurationSpecZero]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -189,6 +185,6 
@@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/ogr_source_duration_spec_infinite.py b/python/geoengine_openapi_client/models/ogr_source_duration_spec_infinite.py index 26edaa40..2531a215 100644 --- a/python/geoengine_openapi_client/models/ogr_source_duration_spec_infinite.py +++ b/python/geoengine_openapi_client/models/ogr_source_duration_spec_infinite.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDurationSpecInfinite(BaseModel): """ OgrSourceDurationSpecInfinite - """ - type: StrictStr = Field(...) - __properties = ["type"] + """ # noqa: E501 + type: StrictStr + __properties: ClassVar[List[str]] = ["type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('infinite', 'zero', 'value'): - raise ValueError("must be one of enum values ('infinite', 'zero', 'value')") + if value not in set(['infinite']): + raise ValueError("must be one of enum values ('infinite')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDurationSpecInfinite: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecInfinite from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDurationSpecInfinite: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecInfinite from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDurationSpecInfinite.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDurationSpecInfinite.parse_obj({ + _obj = cls.model_validate({ "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_duration_spec_value.py b/python/geoengine_openapi_client/models/ogr_source_duration_spec_value.py index 2644322c..b3a2652f 100644 --- a/python/geoengine_openapi_client/models/ogr_source_duration_spec_value.py +++ b/python/geoengine_openapi_client/models/ogr_source_duration_spec_value.py @@ -18,63 +18,80 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.time_granularity import TimeGranularity +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDurationSpecValue(BaseModel): """ OgrSourceDurationSpecValue - """ - granularity: TimeGranularity = Field(...) - step: conint(strict=True, ge=0) = Field(...) - type: StrictStr = Field(...) - __properties = ["granularity", "step", "type"] + """ # noqa: E501 + granularity: TimeGranularity + step: Annotated[int, Field(strict=True, ge=0)] + type: StrictStr + __properties: ClassVar[List[str]] = ["granularity", "step", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('value'): + if value not in set(['value']): raise ValueError("must be one of enum values ('value')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDurationSpecValue: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecValue from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDurationSpecValue: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecValue from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDurationSpecValue.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDurationSpecValue.parse_obj({ + _obj = cls.model_validate({ "granularity": obj.get("granularity"), "step": obj.get("step"), "type": obj.get("type") diff --git a/python/geoengine_openapi_client/models/ogr_source_duration_spec_zero.py b/python/geoengine_openapi_client/models/ogr_source_duration_spec_zero.py index dde6d994..fac14ab0 100644 --- a/python/geoengine_openapi_client/models/ogr_source_duration_spec_zero.py +++ b/python/geoengine_openapi_client/models/ogr_source_duration_spec_zero.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class OgrSourceDurationSpecZero(BaseModel): """ OgrSourceDurationSpecZero - """ - type: StrictStr = Field(...) - __properties = ["type"] + """ # noqa: E501 + type: StrictStr + __properties: ClassVar[List[str]] = ["type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('zero'): + if value not in set(['zero']): raise ValueError("must be one of enum values ('zero')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OgrSourceDurationSpecZero: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecZero from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> OgrSourceDurationSpecZero: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OgrSourceDurationSpecZero from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OgrSourceDurationSpecZero.parse_obj(obj) + return cls.model_validate(obj) - _obj = OgrSourceDurationSpecZero.parse_obj({ + _obj = cls.model_validate({ "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/ogr_source_error_spec.py b/python/geoengine_openapi_client/models/ogr_source_error_spec.py index 41fa1680..8a4b23a2 100644 --- a/python/geoengine_openapi_client/models/ogr_source_error_spec.py +++ b/python/geoengine_openapi_client/models/ogr_source_error_spec.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class OgrSourceErrorSpec(str, Enum): @@ -34,8 +31,8 @@ class OgrSourceErrorSpec(str, Enum): ABORT = 'abort' @classmethod - def from_json(cls, json_str: str) -> OgrSourceErrorSpec: + def from_json(cls, json_str: str) -> Self: """Create an instance of OgrSourceErrorSpec from a JSON string""" - return OgrSourceErrorSpec(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/ogr_source_time_format.py b/python/geoengine_openapi_client/models/ogr_source_time_format.py index d4cd6672..bfc39219 100644 --- a/python/geoengine_openapi_client/models/ogr_source_time_format.py +++ b/python/geoengine_openapi_client/models/ogr_source_time_format.py @@ -14,42 +14,37 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator -from geoengine_openapi_client.models.ogr_source_time_format_auto import OgrSourceTimeFormatAuto -from geoengine_openapi_client.models.ogr_source_time_format_custom import OgrSourceTimeFormatCustom -from geoengine_openapi_client.models.ogr_source_time_format_unix_time_stamp import OgrSourceTimeFormatUnixTimeStamp -from typing import Union, Any, List, TYPE_CHECKING +from geoengine_openapi_client.models.ogr_source_time_format_one_of import OgrSourceTimeFormatOneOf +from geoengine_openapi_client.models.ogr_source_time_format_one_of1 import OgrSourceTimeFormatOneOf1 +from geoengine_openapi_client.models.ogr_source_time_format_one_of2 import OgrSourceTimeFormatOneOf2 from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self -OGRSOURCETIMEFORMAT_ONE_OF_SCHEMAS = ["OgrSourceTimeFormatAuto", "OgrSourceTimeFormatCustom", "OgrSourceTimeFormatUnixTimeStamp"] +OGRSOURCETIMEFORMAT_ONE_OF_SCHEMAS = ["OgrSourceTimeFormatOneOf", "OgrSourceTimeFormatOneOf1", "OgrSourceTimeFormatOneOf2"] class OgrSourceTimeFormat(BaseModel): """ OgrSourceTimeFormat """ - # data type: OgrSourceTimeFormatCustom - oneof_schema_1_validator: Optional[OgrSourceTimeFormatCustom] = None - # data type: OgrSourceTimeFormatUnixTimeStamp - oneof_schema_2_validator: 
Optional[OgrSourceTimeFormatUnixTimeStamp] = None - # data type: OgrSourceTimeFormatAuto - oneof_schema_3_validator: Optional[OgrSourceTimeFormatAuto] = None - if TYPE_CHECKING: - actual_instance: Union[OgrSourceTimeFormatAuto, OgrSourceTimeFormatCustom, OgrSourceTimeFormatUnixTimeStamp] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(OGRSOURCETIMEFORMAT_ONE_OF_SCHEMAS, const=True) - - class Config: - validate_assignment = True - - discriminator_value_class_map = { - } + # data type: OgrSourceTimeFormatOneOf + oneof_schema_1_validator: Optional[OgrSourceTimeFormatOneOf] = None + # data type: OgrSourceTimeFormatOneOf1 + oneof_schema_2_validator: Optional[OgrSourceTimeFormatOneOf1] = None + # data type: OgrSourceTimeFormatOneOf2 + oneof_schema_3_validator: Optional[OgrSourceTimeFormatOneOf2] = None + actual_instance: Optional[Union[OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2]] = None + one_of_schemas: Set[str] = { "OgrSourceTimeFormatOneOf", "OgrSourceTimeFormatOneOf1", "OgrSourceTimeFormatOneOf2" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + def __init__(self, *args, **kwargs) -> None: if args: @@ -61,106 +56,71 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = OgrSourceTimeFormat.construct() + instance = OgrSourceTimeFormat.model_construct() error_messages = [] match = 0 - # validate data type: OgrSourceTimeFormatCustom - if not isinstance(v, OgrSourceTimeFormatCustom): - error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatCustom`") + # validate data type: OgrSourceTimeFormatOneOf + if not isinstance(v, OgrSourceTimeFormatOneOf): + error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatOneOf`") else: match += 1 - # validate data type: OgrSourceTimeFormatUnixTimeStamp - if not isinstance(v, OgrSourceTimeFormatUnixTimeStamp): - error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatUnixTimeStamp`") + # validate data type: OgrSourceTimeFormatOneOf1 + if not isinstance(v, OgrSourceTimeFormatOneOf1): + error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatOneOf1`") else: match += 1 - # validate data type: OgrSourceTimeFormatAuto - if not isinstance(v, OgrSourceTimeFormatAuto): - error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatAuto`") + # validate data type: OgrSourceTimeFormatOneOf2 + if not isinstance(v, OgrSourceTimeFormatOneOf2): + error_messages.append(f"Error! Input type `{type(v)}` is not `OgrSourceTimeFormatOneOf2`") else: match += 1 if match > 1: # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatAuto, OgrSourceTimeFormatCustom, OgrSourceTimeFormatUnixTimeStamp. Details: " + ", ".join(error_messages)) + raise ValueError("Multiple matches found when setting `actual_instance` in OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2. Details: " + ", ".join(error_messages)) elif match == 0: # no match - raise ValueError("No match found when setting `actual_instance` in OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatAuto, OgrSourceTimeFormatCustom, OgrSourceTimeFormatUnixTimeStamp. 
Details: " + ", ".join(error_messages)) + raise ValueError("No match found when setting `actual_instance` in OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2. Details: " + ", ".join(error_messages)) else: return v @classmethod - def from_dict(cls, obj: dict) -> OgrSourceTimeFormat: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> OgrSourceTimeFormat: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = OgrSourceTimeFormat.construct() + instance = cls.model_construct() error_messages = [] match = 0 - # use oneOf discriminator to lookup the data type - _data_type = json.loads(json_str).get("format") - if not _data_type: - raise ValueError("Failed to lookup data type from the field `format` in the input.") - - # check if data type is `OgrSourceTimeFormatAuto` - if _data_type == "OgrSourceTimeFormatAuto": - instance.actual_instance = OgrSourceTimeFormatAuto.from_json(json_str) - return instance - - # check if data type is `OgrSourceTimeFormatCustom` - if _data_type == "OgrSourceTimeFormatCustom": - instance.actual_instance = OgrSourceTimeFormatCustom.from_json(json_str) - return instance - - # check if data type is `OgrSourceTimeFormatUnixTimeStamp` - if _data_type == "OgrSourceTimeFormatUnixTimeStamp": - instance.actual_instance = OgrSourceTimeFormatUnixTimeStamp.from_json(json_str) - return instance - - # check if data type is `OgrSourceTimeFormatAuto` - if _data_type == "auto": - instance.actual_instance = OgrSourceTimeFormatAuto.from_json(json_str) - return instance - - # check if data type is `OgrSourceTimeFormatCustom` - if _data_type == "custom": - instance.actual_instance = OgrSourceTimeFormatCustom.from_json(json_str) - return instance - - # check if data type is `OgrSourceTimeFormatUnixTimeStamp` - if _data_type == "unixTimeStamp": - instance.actual_instance = OgrSourceTimeFormatUnixTimeStamp.from_json(json_str) - return instance - - # deserialize data into OgrSourceTimeFormatCustom + # deserialize data into OgrSourceTimeFormatOneOf try: - instance.actual_instance = OgrSourceTimeFormatCustom.from_json(json_str) + instance.actual_instance = OgrSourceTimeFormatOneOf.from_json(json_str) match += 1 except (ValidationError, ValueError) as e: error_messages.append(str(e)) - # deserialize data into OgrSourceTimeFormatUnixTimeStamp + # deserialize data into OgrSourceTimeFormatOneOf1 try: - instance.actual_instance = OgrSourceTimeFormatUnixTimeStamp.from_json(json_str) + instance.actual_instance = OgrSourceTimeFormatOneOf1.from_json(json_str) match += 1 except (ValidationError, ValueError) as e: error_messages.append(str(e)) - # deserialize data into OgrSourceTimeFormatAuto + # deserialize data into OgrSourceTimeFormatOneOf2 try: - instance.actual_instance = OgrSourceTimeFormatAuto.from_json(json_str) + instance.actual_instance = OgrSourceTimeFormatOneOf2.from_json(json_str) match += 1 except (ValidationError, ValueError) as e: error_messages.append(str(e)) if match > 1: # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatAuto, OgrSourceTimeFormatCustom, OgrSourceTimeFormatUnixTimeStamp. 
Details: " + ", ".join(error_messages)) + raise ValueError("Multiple matches found when deserializing the JSON string into OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2. Details: " + ", ".join(error_messages)) elif match == 0: # no match - raise ValueError("No match found when deserializing the JSON string into OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatAuto, OgrSourceTimeFormatCustom, OgrSourceTimeFormatUnixTimeStamp. Details: " + ", ".join(error_messages)) + raise ValueError("No match found when deserializing the JSON string into OgrSourceTimeFormat with oneOf schemas: OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2. Details: " + ", ".join(error_messages)) else: return instance @@ -169,19 +129,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], OgrSourceTimeFormatOneOf, OgrSourceTimeFormatOneOf1, OgrSourceTimeFormatOneOf2]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -189,6 +147,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/ogr_source_time_format_one_of.py b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of.py new file mode 100644 index 00000000..981dd3b2 --- /dev/null +++ b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class OgrSourceTimeFormatOneOf(BaseModel): + """ + OgrSourceTimeFormatOneOf + """ # noqa: E501 + custom_format: StrictStr = Field(alias="customFormat") + format: StrictStr + __properties: ClassVar[List[str]] = ["customFormat", "format"] + + @field_validator('format') + def format_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['custom']): + raise ValueError("must be one of enum values ('custom')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "customFormat": obj.get("customFormat"), + "format": obj.get("format") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/ogr_source_time_format_one_of1.py b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of1.py new file mode 100644 index 00000000..ba8c3da9 --- /dev/null +++ b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of1.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from geoengine_openapi_client.models.unix_time_stamp_type import UnixTimeStampType +from typing import Optional, Set +from typing_extensions import Self + +class OgrSourceTimeFormatOneOf1(BaseModel): + """ + OgrSourceTimeFormatOneOf1 + """ # noqa: E501 + format: StrictStr + timestamp_type: UnixTimeStampType = Field(alias="timestampType") + __properties: ClassVar[List[str]] = ["format", "timestampType"] + + @field_validator('format') + def format_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['unixTimeStamp']): + raise ValueError("must be one of enum values ('unixTimeStamp')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "format": obj.get("format"), + "timestampType": obj.get("timestampType") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/ogr_source_time_format_one_of2.py b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of2.py new file mode 100644 index 00000000..8008a6a3 --- /dev/null +++ b/python/geoengine_openapi_client/models/ogr_source_time_format_one_of2.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class OgrSourceTimeFormatOneOf2(BaseModel): + """ + OgrSourceTimeFormatOneOf2 + """ # noqa: E501 + format: StrictStr + __properties: ClassVar[List[str]] = ["format"] + + @field_validator('format') + def format_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['auto']): + raise ValueError("must be one of enum values ('auto')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OgrSourceTimeFormatOneOf2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "format": obj.get("format") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/operator_quota.py b/python/geoengine_openapi_client/models/operator_quota.py index 47f62608..435fd6db 100644 --- a/python/geoengine_openapi_client/models/operator_quota.py +++ b/python/geoengine_openapi_client/models/operator_quota.py @@ -18,58 +18,75 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, conint +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class OperatorQuota(BaseModel): """ OperatorQuota - """ - count: conint(strict=True, ge=0) = Field(...) 
- operator_name: StrictStr = Field(..., alias="operatorName") - operator_path: StrictStr = Field(..., alias="operatorPath") - __properties = ["count", "operatorName", "operatorPath"] + """ # noqa: E501 + count: Annotated[int, Field(strict=True, ge=0)] + operator_name: StrictStr = Field(alias="operatorName") + operator_path: StrictStr = Field(alias="operatorPath") + __properties: ClassVar[List[str]] = ["count", "operatorName", "operatorPath"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> OperatorQuota: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of OperatorQuota from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> OperatorQuota: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of OperatorQuota from a dict""" if obj is None: return None if not isinstance(obj, dict): - return OperatorQuota.parse_obj(obj) + return cls.model_validate(obj) - _obj = OperatorQuota.parse_obj({ + _obj = cls.model_validate({ "count": obj.get("count"), - "operator_name": obj.get("operatorName"), - "operator_path": obj.get("operatorPath") + "operatorName": obj.get("operatorName"), + "operatorPath": obj.get("operatorPath") }) return _obj diff --git a/python/geoengine_openapi_client/models/order_by.py b/python/geoengine_openapi_client/models/order_by.py index e6ce7f14..6c648d76 100644 --- a/python/geoengine_openapi_client/models/order_by.py +++ b/python/geoengine_openapi_client/models/order_by.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class OrderBy(str, Enum): @@ -34,8 +31,8 @@ class OrderBy(str, Enum): NAMEDESC = 'NameDesc' @classmethod - def from_json(cls, json_str: str) -> OrderBy: + def from_json(cls, json_str: str) -> Self: """Create an instance of OrderBy from a JSON string""" - return OrderBy(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/palette_colorizer.py b/python/geoengine_openapi_client/models/palette_colorizer.py index 081b5da7..3523ff47 100644 --- a/python/geoengine_openapi_client/models/palette_colorizer.py +++ b/python/geoengine_openapi_client/models/palette_colorizer.py @@ -18,78 +18,83 @@ import re # noqa: F401 import json - -from typing import Dict, List -from pydantic import BaseModel, Field, StrictInt, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class PaletteColorizer(BaseModel): """ PaletteColorizer - """ - colors: Dict[str, conlist(StrictInt, max_items=4, min_items=4)] = Field(..., description="A map from value to color It is assumed that is has at least one and at most 256 entries.") - default_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="defaultColor") - no_data_color: conlist(StrictInt, max_items=4, min_items=4) = Field(..., alias="noDataColor") - type: StrictStr = Field(...) 
- __properties = ["colors", "defaultColor", "noDataColor", "type"] - - @validator('type') + """ # noqa: E501 + colors: Dict[str, Annotated[List[StrictInt], Field(min_length=4, max_length=4)]] = Field(description="A map from value to color It is assumed that is has at least one and at most 256 entries.") + default_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="defaultColor") + no_data_color: Annotated[List[StrictInt], Field(min_length=4, max_length=4)] = Field(alias="noDataColor") + type: StrictStr + __properties: ClassVar[List[str]] = ["colors", "defaultColor", "noDataColor", "type"] + + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('palette'): + if value not in set(['palette']): raise ValueError("must be one of enum values ('palette')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PaletteColorizer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PaletteColorizer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) - # Note: fixed wrong handling of colors field - return _dict + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) - # override the default output from pydantic by calling `to_dict()` of each value in colors (dict of array) - _field_dict_of_array = {} - if self.colors: - for _key in self.colors: - if self.colors[_key]: - _field_dict_of_array[_key] = [ - _item.to_dict() for _item in self.colors[_key] - ] - _dict['colors'] = _field_dict_of_array + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> PaletteColorizer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PaletteColorizer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PaletteColorizer.parse_obj(obj) + return cls.model_validate(obj) - _obj = PaletteColorizer.parse_obj({ + _obj = cls.model_validate({ "colors": obj.get("colors"), - "default_color": obj.get("defaultColor"), - "no_data_color": obj.get("noDataColor"), + "defaultColor": obj.get("defaultColor"), + "noDataColor": obj.get("noDataColor"), "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/permission.py b/python/geoengine_openapi_client/models/permission.py index 9f444bd4..6b317edb 100644 --- a/python/geoengine_openapi_client/models/permission.py +++ b/python/geoengine_openapi_client/models/permission.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class Permission(str, Enum): @@ -34,8 +31,8 @@ class Permission(str, Enum): OWNER = 'Owner' @classmethod - def from_json(cls, json_str: str) -> Permission: + def from_json(cls, json_str: str) -> Self: """Create an instance of Permission from a JSON string""" - return Permission(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/permission_list_options.py b/python/geoengine_openapi_client/models/permission_list_options.py index 59431ad4..270b9ea9 100644 --- a/python/geoengine_openapi_client/models/permission_list_options.py +++ b/python/geoengine_openapi_client/models/permission_list_options.py @@ -18,54 +18,71 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, conint +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class PermissionListOptions(BaseModel): """ PermissionListOptions - """ - limit: conint(strict=True, ge=0) = Field(...) - offset: conint(strict=True, ge=0) = Field(...) 
- __properties = ["limit", "offset"] + """ # noqa: E501 + limit: Annotated[int, Field(strict=True, ge=0)] + offset: Annotated[int, Field(strict=True, ge=0)] + __properties: ClassVar[List[str]] = ["limit", "offset"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PermissionListOptions: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PermissionListOptions from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> PermissionListOptions: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PermissionListOptions from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PermissionListOptions.parse_obj(obj) + return cls.model_validate(obj) - _obj = PermissionListOptions.parse_obj({ + _obj = cls.model_validate({ "limit": obj.get("limit"), "offset": obj.get("offset") }) diff --git a/python/geoengine_openapi_client/models/permission_listing.py b/python/geoengine_openapi_client/models/permission_listing.py index 45410c14..3817b4e2 100644 --- a/python/geoengine_openapi_client/models/permission_listing.py +++ b/python/geoengine_openapi_client/models/permission_listing.py @@ -18,46 +18,62 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.permission import Permission from geoengine_openapi_client.models.resource import Resource from geoengine_openapi_client.models.role import Role +from typing import Optional, Set +from typing_extensions import Self class PermissionListing(BaseModel): """ PermissionListing - """ - permission: Permission = Field(...) - resource: Resource = Field(...) - role: Role = Field(...) 
- __properties = ["permission", "resource", "role"] + """ # noqa: E501 + permission: Permission + resource: Resource + role: Role + __properties: ClassVar[List[str]] = ["permission", "resource", "role"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PermissionListing: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PermissionListing from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of resource if self.resource: _dict['resource'] = self.resource.to_dict() @@ -67,18 +83,18 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> PermissionListing: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PermissionListing from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PermissionListing.parse_obj(obj) + return cls.model_validate(obj) - _obj = PermissionListing.parse_obj({ + _obj = cls.model_validate({ "permission": obj.get("permission"), - "resource": Resource.from_dict(obj.get("resource")) if obj.get("resource") is not None else None, - "role": Role.from_dict(obj.get("role")) if obj.get("role") is not None else None + "resource": Resource.from_dict(obj["resource"]) if obj.get("resource") is not None else None, + "role": Role.from_dict(obj["role"]) if obj.get("role") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/permission_request.py b/python/geoengine_openapi_client/models/permission_request.py index 9051ee73..4b517349 100644 --- a/python/geoengine_openapi_client/models/permission_request.py +++ b/python/geoengine_openapi_client/models/permission_request.py @@ -18,63 +18,79 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.permission import Permission from geoengine_openapi_client.models.resource import Resource +from typing import Optional, Set +from typing_extensions import Self class PermissionRequest(BaseModel): """ - Request for adding a new permission to the given 
role on the given resource # noqa: E501 - """ - permission: Permission = Field(...) - resource: Resource = Field(...) - role_id: StrictStr = Field(..., alias="roleId") - __properties = ["permission", "resource", "roleId"] + Request for adding a new permission to the given role on the given resource + """ # noqa: E501 + permission: Permission + resource: Resource + role_id: StrictStr = Field(alias="roleId") + __properties: ClassVar[List[str]] = ["permission", "resource", "roleId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PermissionRequest: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PermissionRequest from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of resource if self.resource: _dict['resource'] = self.resource.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> PermissionRequest: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PermissionRequest from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PermissionRequest.parse_obj(obj) + return cls.model_validate(obj) - _obj = PermissionRequest.parse_obj({ + _obj = cls.model_validate({ "permission": obj.get("permission"), - "resource": Resource.from_dict(obj.get("resource")) if obj.get("resource") is not None else None, - "role_id": obj.get("roleId") + "resource": Resource.from_dict(obj["resource"]) if obj.get("resource") is not None else None, + "roleId": obj.get("roleId") }) return _obj diff --git a/python/geoengine_openapi_client/models/plot.py b/python/geoengine_openapi_client/models/plot.py index 063aeaf4..47d9f23b 100644 --- a/python/geoengine_openapi_client/models/plot.py +++ b/python/geoengine_openapi_client/models/plot.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class Plot(BaseModel): """ Plot - """ - name: StrictStr = Field(...) - workflow: StrictStr = Field(...) 
- __properties = ["name", "workflow"] + """ # noqa: E501 + name: StrictStr + workflow: StrictStr + __properties: ClassVar[List[str]] = ["name", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Plot: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Plot from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Plot: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Plot from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Plot.parse_obj(obj) + return cls.model_validate(obj) - _obj = Plot.parse_obj({ + _obj = cls.model_validate({ "name": obj.get("name"), "workflow": obj.get("workflow") }) diff --git a/python/geoengine_openapi_client/models/plot_output_format.py b/python/geoengine_openapi_client/models/plot_output_format.py index 74d1cc0f..10612d29 100644 --- a/python/geoengine_openapi_client/models/plot_output_format.py +++ b/python/geoengine_openapi_client/models/plot_output_format.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class PlotOutputFormat(str, Enum): @@ -35,8 +32,8 @@ class PlotOutputFormat(str, Enum): IMAGEPNG = 'ImagePng' @classmethod - def from_json(cls, json_str: str) -> PlotOutputFormat: + def from_json(cls, json_str: str) -> Self: """Create an instance of PlotOutputFormat from a JSON string""" - return PlotOutputFormat(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/plot_result_descriptor.py b/python/geoengine_openapi_client/models/plot_result_descriptor.py index 602055a5..0202e6a0 100644 --- a/python/geoengine_openapi_client/models/plot_result_descriptor.py +++ b/python/geoengine_openapi_client/models/plot_result_descriptor.py @@ -18,45 +18,61 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, 
Optional from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class PlotResultDescriptor(BaseModel): """ - A `ResultDescriptor` for plot queries # noqa: E501 - """ + A `ResultDescriptor` for plot queries + """ # noqa: E501 bbox: Optional[BoundingBox2D] = None - spatial_reference: StrictStr = Field(..., alias="spatialReference") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - __properties = ["bbox", "spatialReference", "time"] + __properties: ClassVar[List[str]] = ["bbox", "spatialReference", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PlotResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PlotResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: _dict['bbox'] = self.bbox.to_dict() @@ -64,30 +80,30 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> PlotResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PlotResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PlotResultDescriptor.parse_obj(obj) + return cls.model_validate(obj) - _obj = PlotResultDescriptor.parse_obj({ - "bbox": BoundingBox2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None + _obj = cls.model_validate({ + "bbox": BoundingBox2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/point_symbology.py b/python/geoengine_openapi_client/models/point_symbology.py index 87ac8dd7..ccfacbf9 100644 --- a/python/geoengine_openapi_client/models/point_symbology.py +++ b/python/geoengine_openapi_client/models/point_symbology.py @@ -18,56 +18,72 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.color_param import ColorParam from geoengine_openapi_client.models.number_param import NumberParam from geoengine_openapi_client.models.stroke_param import StrokeParam from geoengine_openapi_client.models.text_symbology import TextSymbology +from typing import Optional, Set +from typing_extensions import Self class PointSymbology(BaseModel): """ PointSymbology - """ - fill_color: ColorParam = Field(..., alias="fillColor") - radius: NumberParam = Field(...) - stroke: StrokeParam = Field(...) + """ # noqa: E501 + fill_color: ColorParam = Field(alias="fillColor") + radius: NumberParam + stroke: StrokeParam text: Optional[TextSymbology] = None - type: StrictStr = Field(...) 
- __properties = ["fillColor", "radius", "stroke", "text", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["fillColor", "radius", "stroke", "text", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('point'): + if value not in set(['point']): raise ValueError("must be one of enum values ('point')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PointSymbology: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PointSymbology from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of fill_color if self.fill_color: _dict['fillColor'] = self.fill_color.to_dict() @@ -81,26 +97,26 @@ def to_dict(self): if self.text: _dict['text'] = self.text.to_dict() # set to None if text (nullable) is None - # and __fields_set__ contains the field - if self.text is None and "text" in self.__fields_set__: + # and model_fields_set contains the field + if self.text is None and "text" in self.model_fields_set: _dict['text'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> PointSymbology: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PointSymbology from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PointSymbology.parse_obj(obj) + return cls.model_validate(obj) - _obj = PointSymbology.parse_obj({ - "fill_color": ColorParam.from_dict(obj.get("fillColor")) if obj.get("fillColor") is not None else None, - "radius": NumberParam.from_dict(obj.get("radius")) if obj.get("radius") is not None else None, - "stroke": StrokeParam.from_dict(obj.get("stroke")) if obj.get("stroke") is not None else None, - "text": TextSymbology.from_dict(obj.get("text")) if obj.get("text") is not None else None, + _obj = cls.model_validate({ + "fillColor": ColorParam.from_dict(obj["fillColor"]) if obj.get("fillColor") is not None else None, + "radius": NumberParam.from_dict(obj["radius"]) if obj.get("radius") is not None else None, + "stroke": StrokeParam.from_dict(obj["stroke"]) if obj.get("stroke") is not None else None, + "text": TextSymbology.from_dict(obj["text"]) if obj.get("text") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/polygon_symbology.py b/python/geoengine_openapi_client/models/polygon_symbology.py index 4ddc9bcf..5ce7d23e 100644 --- a/python/geoengine_openapi_client/models/polygon_symbology.py +++ b/python/geoengine_openapi_client/models/polygon_symbology.py @@ -18,55 +18,71 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictBool, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.color_param import ColorParam from geoengine_openapi_client.models.stroke_param import StrokeParam from geoengine_openapi_client.models.text_symbology import TextSymbology +from typing import Optional, Set +from typing_extensions import Self class PolygonSymbology(BaseModel): """ PolygonSymbology - """ - auto_simplified: StrictBool = Field(..., alias="autoSimplified") - fill_color: ColorParam = Field(..., alias="fillColor") - stroke: StrokeParam = Field(...) + """ # noqa: E501 + auto_simplified: StrictBool = Field(alias="autoSimplified") + fill_color: ColorParam = Field(alias="fillColor") + stroke: StrokeParam text: Optional[TextSymbology] = None - type: StrictStr = Field(...) 
- __properties = ["autoSimplified", "fillColor", "stroke", "text", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["autoSimplified", "fillColor", "stroke", "text", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('polygon'): + if value not in set(['polygon']): raise ValueError("must be one of enum values ('polygon')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> PolygonSymbology: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of PolygonSymbology from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of fill_color if self.fill_color: _dict['fillColor'] = self.fill_color.to_dict() @@ -77,26 +93,26 @@ def to_dict(self): if self.text: _dict['text'] = self.text.to_dict() # set to None if text (nullable) is None - # and __fields_set__ contains the field - if self.text is None and "text" in self.__fields_set__: + # and model_fields_set contains the field + if self.text is None and "text" in self.model_fields_set: _dict['text'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> PolygonSymbology: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of PolygonSymbology from a dict""" if obj is None: return None if not isinstance(obj, dict): - return PolygonSymbology.parse_obj(obj) + return cls.model_validate(obj) - _obj = PolygonSymbology.parse_obj({ - "auto_simplified": obj.get("autoSimplified"), - "fill_color": ColorParam.from_dict(obj.get("fillColor")) if obj.get("fillColor") is not None else None, - "stroke": StrokeParam.from_dict(obj.get("stroke")) if obj.get("stroke") is not None else None, - "text": TextSymbology.from_dict(obj.get("text")) if obj.get("text") is not None else None, + _obj = cls.model_validate({ + "autoSimplified": obj.get("autoSimplified"), + "fillColor": ColorParam.from_dict(obj["fillColor"]) if obj.get("fillColor") is not None else None, + "stroke": StrokeParam.from_dict(obj["stroke"]) if obj.get("stroke") is not None else None, + "text": TextSymbology.from_dict(obj["text"]) if obj.get("text") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/project.py b/python/geoengine_openapi_client/models/project.py index 3bbc26e2..e8230d89 100644 --- a/python/geoengine_openapi_client/models/project.py +++ b/python/geoengine_openapi_client/models/project.py @@ -18,69 +18,85 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.plot import Plot from geoengine_openapi_client.models.project_layer import ProjectLayer from geoengine_openapi_client.models.project_version import ProjectVersion from geoengine_openapi_client.models.st_rectangle import STRectangle from geoengine_openapi_client.models.time_step import TimeStep +from typing import Optional, Set +from typing_extensions import Self class Project(BaseModel): """ Project - """ - bounds: STRectangle = Field(...) - description: StrictStr = Field(...) - id: StrictStr = Field(...) - layers: conlist(ProjectLayer) = Field(...) - name: StrictStr = Field(...) - plots: conlist(Plot) = Field(...) - time_step: TimeStep = Field(..., alias="timeStep") - version: ProjectVersion = Field(...) 
- __properties = ["bounds", "description", "id", "layers", "name", "plots", "timeStep", "version"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + bounds: STRectangle + description: StrictStr + id: StrictStr + layers: List[ProjectLayer] + name: StrictStr + plots: List[Plot] + time_step: TimeStep = Field(alias="timeStep") + version: ProjectVersion + __properties: ClassVar[List[str]] = ["bounds", "description", "id", "layers", "name", "plots", "timeStep", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Project: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Project from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bounds if self.bounds: _dict['bounds'] = self.bounds.to_dict() # override the default output from pydantic by calling `to_dict()` of each item in layers (list) _items = [] if self.layers: - for _item in self.layers: - if _item: - _items.append(_item.to_dict()) + for _item_layers in self.layers: + if _item_layers: + _items.append(_item_layers.to_dict()) _dict['layers'] = _items # override the default output from pydantic by calling `to_dict()` of each item in plots (list) _items = [] if self.plots: - for _item in self.plots: - if _item: - _items.append(_item.to_dict()) + for _item_plots in self.plots: + if _item_plots: + _items.append(_item_plots.to_dict()) _dict['plots'] = _items # override the default output from pydantic by calling `to_dict()` of time_step if self.time_step: @@ -91,23 +107,23 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> Project: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Project from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Project.parse_obj(obj) + return cls.model_validate(obj) - _obj = Project.parse_obj({ - "bounds": STRectangle.from_dict(obj.get("bounds")) if obj.get("bounds") is not None else None, + _obj = cls.model_validate({ + "bounds": STRectangle.from_dict(obj["bounds"]) if obj.get("bounds") is not None else None, "description": obj.get("description"), "id": obj.get("id"), - "layers": [ProjectLayer.from_dict(_item) for _item in obj.get("layers")] if obj.get("layers") is not None else None, + "layers": [ProjectLayer.from_dict(_item) for _item in obj["layers"]] if obj.get("layers") is not None else None, "name": obj.get("name"), - "plots": [Plot.from_dict(_item) for _item in obj.get("plots")] if obj.get("plots") is not None else None, - "time_step": TimeStep.from_dict(obj.get("timeStep")) if obj.get("timeStep") is not None else None, - "version": ProjectVersion.from_dict(obj.get("version")) if obj.get("version") is not None else None + "plots": [Plot.from_dict(_item) for _item in obj["plots"]] if obj.get("plots") is not None else None, + "timeStep": TimeStep.from_dict(obj["timeStep"]) if obj.get("timeStep") is not None else None, + "version": ProjectVersion.from_dict(obj["version"]) if obj.get("version") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/project_layer.py b/python/geoengine_openapi_client/models/project_layer.py index 0fbc1374..94565988 100644 --- a/python/geoengine_openapi_client/models/project_layer.py +++ b/python/geoengine_openapi_client/models/project_layer.py @@ -18,46 +18,62 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.layer_visibility import LayerVisibility from geoengine_openapi_client.models.symbology import Symbology +from typing import Optional, Set +from typing_extensions import Self class ProjectLayer(BaseModel): """ ProjectLayer - """ - name: StrictStr = Field(...) - symbology: Symbology = Field(...) - visibility: LayerVisibility = Field(...) - workflow: StrictStr = Field(...) 
- __properties = ["name", "symbology", "visibility", "workflow"] + """ # noqa: E501 + name: StrictStr + symbology: Symbology + visibility: LayerVisibility + workflow: StrictStr + __properties: ClassVar[List[str]] = ["name", "symbology", "visibility", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProjectLayer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProjectLayer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of symbology if self.symbology: _dict['symbology'] = self.symbology.to_dict() @@ -67,18 +83,18 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> ProjectLayer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProjectLayer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProjectLayer.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProjectLayer.parse_obj({ + _obj = cls.model_validate({ "name": obj.get("name"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, - "visibility": LayerVisibility.from_dict(obj.get("visibility")) if obj.get("visibility") is not None else None, + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, + "visibility": LayerVisibility.from_dict(obj["visibility"]) if obj.get("visibility") is not None else None, "workflow": obj.get("workflow") }) return _obj diff --git a/python/geoengine_openapi_client/models/project_listing.py b/python/geoengine_openapi_client/models/project_listing.py index 7fd65772..23607e75 100644 --- a/python/geoengine_openapi_client/models/project_listing.py +++ b/python/geoengine_openapi_client/models/project_listing.py @@ -19,63 +19,80 @@ import json from datetime import datetime -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ProjectListing(BaseModel): """ ProjectListing - """ - changed: 
datetime = Field(...) - description: StrictStr = Field(...) - id: StrictStr = Field(...) - layer_names: conlist(StrictStr) = Field(..., alias="layerNames") - name: StrictStr = Field(...) - plot_names: conlist(StrictStr) = Field(..., alias="plotNames") - __properties = ["changed", "description", "id", "layerNames", "name", "plotNames"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + """ # noqa: E501 + changed: datetime + description: StrictStr + id: StrictStr + layer_names: List[StrictStr] = Field(alias="layerNames") + name: StrictStr + plot_names: List[StrictStr] = Field(alias="plotNames") + __properties: ClassVar[List[str]] = ["changed", "description", "id", "layerNames", "name", "plotNames"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProjectListing: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProjectListing from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ProjectListing: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProjectListing from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProjectListing.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProjectListing.parse_obj({ + _obj = cls.model_validate({ "changed": obj.get("changed"), "description": obj.get("description"), "id": obj.get("id"), - "layer_names": obj.get("layerNames"), + "layerNames": obj.get("layerNames"), "name": obj.get("name"), - "plot_names": obj.get("plotNames") + "plotNames": obj.get("plotNames") }) return _obj diff --git a/python/geoengine_openapi_client/models/project_resource.py b/python/geoengine_openapi_client/models/project_resource.py index b5503164..8a1a2020 100644 --- a/python/geoengine_openapi_client/models/project_resource.py +++ b/python/geoengine_openapi_client/models/project_resource.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ProjectResource(BaseModel): """ ProjectResource - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) - __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('project'): + if value not in set(['project']): raise ValueError("must be one of enum values ('project')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProjectResource: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProjectResource from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ProjectResource: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProjectResource from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProjectResource.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProjectResource.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/project_update_token.py b/python/geoengine_openapi_client/models/project_update_token.py index d5c8ec2d..6dd63f3d 100644 --- a/python/geoengine_openapi_client/models/project_update_token.py +++ b/python/geoengine_openapi_client/models/project_update_token.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class ProjectUpdateToken(str, Enum): @@ -34,8 +31,8 @@ class ProjectUpdateToken(str, Enum): DELETE = 'delete' @classmethod - def from_json(cls, json_str: str) -> ProjectUpdateToken: + def from_json(cls, json_str: str) -> Self: """Create an instance of ProjectUpdateToken from a JSON string""" - return ProjectUpdateToken(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/project_version.py b/python/geoengine_openapi_client/models/project_version.py index 4c9126f8..6f8004df 100644 --- a/python/geoengine_openapi_client/models/project_version.py +++ b/python/geoengine_openapi_client/models/project_version.py @@ -19,53 +19,70 @@ import json from datetime import datetime - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ProjectVersion(BaseModel): """ ProjectVersion - """ - changed: datetime = Field(...) - id: StrictStr = Field(...) - __properties = ["changed", "id"] + """ # noqa: E501 + changed: datetime + id: StrictStr + __properties: ClassVar[List[str]] = ["changed", "id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProjectVersion: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProjectVersion from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ProjectVersion: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProjectVersion from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProjectVersion.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProjectVersion.parse_obj({ + _obj = cls.model_validate({ "changed": obj.get("changed"), "id": obj.get("id") }) diff --git a/python/geoengine_openapi_client/models/provenance.py b/python/geoengine_openapi_client/models/provenance.py index 955225ee..b402680d 100644 --- a/python/geoengine_openapi_client/models/provenance.py +++ b/python/geoengine_openapi_client/models/provenance.py @@ -18,55 +18,71 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class Provenance(BaseModel): """ Provenance - """ - citation: StrictStr = Field(...) - license: StrictStr = Field(...) - uri: StrictStr = Field(...) - __properties = ["citation", "license", "uri"] + """ # noqa: E501 + citation: StrictStr + license: StrictStr + uri: StrictStr + __properties: ClassVar[List[str]] = ["citation", "license", "uri"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Provenance: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Provenance from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Provenance: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Provenance from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Provenance.parse_obj(obj) + return cls.model_validate(obj) - _obj = Provenance.parse_obj({ + _obj = cls.model_validate({ "citation": obj.get("citation"), "license": obj.get("license"), "uri": obj.get("uri") diff --git a/python/geoengine_openapi_client/models/provenance_entry.py b/python/geoengine_openapi_client/models/provenance_entry.py index 9dc18f34..d57acd80 100644 --- a/python/geoengine_openapi_client/models/provenance_entry.py +++ b/python/geoengine_openapi_client/models/provenance_entry.py @@ -18,50 +18,66 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.data_id import DataId from geoengine_openapi_client.models.provenance import Provenance +from typing import Optional, Set +from typing_extensions import Self class ProvenanceEntry(BaseModel): """ ProvenanceEntry - """ - data: conlist(DataId) = Field(...) - provenance: Provenance = Field(...) - __properties = ["data", "provenance"] + """ # noqa: E501 + data: List[DataId] + provenance: Provenance + __properties: ClassVar[List[str]] = ["data", "provenance"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProvenanceEntry: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProvenanceEntry from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in data (list) _items = [] if self.data: - for _item in self.data: - if _item: - _items.append(_item.to_dict()) + for _item_data in self.data: + if _item_data: + _items.append(_item_data.to_dict()) _dict['data'] = _items # override the default output from pydantic by calling `to_dict()` of provenance if self.provenance: @@ -69,17 +85,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> ProvenanceEntry: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProvenanceEntry from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProvenanceEntry.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProvenanceEntry.parse_obj({ - "data": [DataId.from_dict(_item) for _item in obj.get("data")] if obj.get("data") is not None else None, - "provenance": Provenance.from_dict(obj.get("provenance")) if obj.get("provenance") is not None else None + _obj = cls.model_validate({ + "data": [DataId.from_dict(_item) for _item in obj["data"]] if obj.get("data") is not None else None, + "provenance": Provenance.from_dict(obj["provenance"]) if obj.get("provenance") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/provenance_output.py b/python/geoengine_openapi_client/models/provenance_output.py index a2853146..f8eb6cdc 100644 --- a/python/geoengine_openapi_client/models/provenance_output.py +++ b/python/geoengine_openapi_client/models/provenance_output.py @@ -18,73 +18,89 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.data_id import DataId from geoengine_openapi_client.models.provenance import Provenance +from typing import Optional, Set +from typing_extensions import Self class ProvenanceOutput(BaseModel): """ ProvenanceOutput - """ - data: DataId = Field(...) 
- provenance: Optional[conlist(Provenance)] = None - __properties = ["data", "provenance"] + """ # noqa: E501 + data: DataId + provenance: Optional[List[Provenance]] = None + __properties: ClassVar[List[str]] = ["data", "provenance"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProvenanceOutput: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProvenanceOutput from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of data if self.data: _dict['data'] = self.data.to_dict() # override the default output from pydantic by calling `to_dict()` of each item in provenance (list) _items = [] if self.provenance: - for _item in self.provenance: - if _item: - _items.append(_item.to_dict()) + for _item_provenance in self.provenance: + if _item_provenance: + _items.append(_item_provenance.to_dict()) _dict['provenance'] = _items # set to None if provenance (nullable) is None - # and __fields_set__ contains the field - if self.provenance is None and "provenance" in self.__fields_set__: + # and model_fields_set contains the field + if self.provenance is None and "provenance" in self.model_fields_set: _dict['provenance'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> ProvenanceOutput: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProvenanceOutput from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProvenanceOutput.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProvenanceOutput.parse_obj({ - "data": DataId.from_dict(obj.get("data")) if obj.get("data") is not None else None, - "provenance": [Provenance.from_dict(_item) for _item in obj.get("provenance")] if obj.get("provenance") is not None else None + _obj = cls.model_validate({ + "data": DataId.from_dict(obj["data"]) if obj.get("data") is not None else None, + "provenance": [Provenance.from_dict(_item) for _item in obj["provenance"]] if obj.get("provenance") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/provenances.py b/python/geoengine_openapi_client/models/provenances.py index 16ef2a3f..c41a9e94 100644 --- 
a/python/geoengine_openapi_client/models/provenances.py +++ b/python/geoengine_openapi_client/models/provenances.py @@ -18,62 +18,78 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, conlist +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.provenance import Provenance +from typing import Optional, Set +from typing_extensions import Self class Provenances(BaseModel): """ Provenances - """ - provenances: conlist(Provenance) = Field(...) - __properties = ["provenances"] + """ # noqa: E501 + provenances: List[Provenance] + __properties: ClassVar[List[str]] = ["provenances"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Provenances: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Provenances from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in provenances (list) _items = [] if self.provenances: - for _item in self.provenances: - if _item: - _items.append(_item.to_dict()) + for _item_provenances in self.provenances: + if _item_provenances: + _items.append(_item_provenances.to_dict()) _dict['provenances'] = _items return _dict @classmethod - def from_dict(cls, obj: dict) -> Provenances: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Provenances from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Provenances.parse_obj(obj) + return cls.model_validate(obj) - _obj = Provenances.parse_obj({ - "provenances": [Provenance.from_dict(_item) for _item in obj.get("provenances")] if obj.get("provenances") is not None else None + _obj = cls.model_validate({ + "provenances": [Provenance.from_dict(_item) for _item in obj["provenances"]] if obj.get("provenances") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/provider_capabilities.py b/python/geoengine_openapi_client/models/provider_capabilities.py index d4a3836a..6a81ba8d 100644 --- a/python/geoengine_openapi_client/models/provider_capabilities.py +++ b/python/geoengine_openapi_client/models/provider_capabilities.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictBool +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.search_capabilities import SearchCapabilities +from typing import Optional, Set +from typing_extensions import Self class ProviderCapabilities(BaseModel): """ ProviderCapabilities - """ - listing: StrictBool = Field(...) - search: SearchCapabilities = Field(...) - __properties = ["listing", "search"] + """ # noqa: E501 + listing: StrictBool + search: SearchCapabilities + __properties: ClassVar[List[str]] = ["listing", "search"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProviderCapabilities: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProviderCapabilities from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of search if self.search: _dict['search'] = self.search.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> ProviderCapabilities: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProviderCapabilities from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProviderCapabilities.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProviderCapabilities.parse_obj({ + _obj = cls.model_validate({ "listing": obj.get("listing"), - "search": SearchCapabilities.from_dict(obj.get("search")) if obj.get("search") is not None else None + "search": SearchCapabilities.from_dict(obj["search"]) if obj.get("search") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/provider_layer_collection_id.py b/python/geoengine_openapi_client/models/provider_layer_collection_id.py index 12f0fad6..3db6bf81 100644 --- a/python/geoengine_openapi_client/models/provider_layer_collection_id.py +++ b/python/geoengine_openapi_client/models/provider_layer_collection_id.py @@ -18,56 +18,72 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ProviderLayerCollectionId(BaseModel): """ ProviderLayerCollectionId - """ - collection_id: StrictStr = Field(..., alias="collectionId") - provider_id: StrictStr = Field(..., alias="providerId") - __properties = ["collectionId", "providerId"] + """ # noqa: E501 + collection_id: StrictStr = Field(alias="collectionId") + provider_id: StrictStr = Field(alias="providerId") + __properties: ClassVar[List[str]] = ["collectionId", "providerId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProviderLayerCollectionId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProviderLayerCollectionId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ProviderLayerCollectionId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProviderLayerCollectionId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProviderLayerCollectionId.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProviderLayerCollectionId.parse_obj({ - "collection_id": obj.get("collectionId"), - "provider_id": obj.get("providerId") + _obj = cls.model_validate({ + "collectionId": obj.get("collectionId"), + "providerId": obj.get("providerId") }) return _obj diff --git a/python/geoengine_openapi_client/models/provider_layer_id.py b/python/geoengine_openapi_client/models/provider_layer_id.py index 54a3afe6..0c2f1b35 100644 --- a/python/geoengine_openapi_client/models/provider_layer_id.py +++ b/python/geoengine_openapi_client/models/provider_layer_id.py @@ -18,56 +18,72 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ProviderLayerId(BaseModel): """ ProviderLayerId - """ - layer_id: StrictStr = Field(..., alias="layerId") - provider_id: StrictStr = Field(..., alias="providerId") - __properties = ["layerId", "providerId"] + """ # noqa: E501 + layer_id: StrictStr = Field(alias="layerId") + provider_id: StrictStr = Field(alias="providerId") + __properties: ClassVar[List[str]] = ["layerId", "providerId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ProviderLayerId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ProviderLayerId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ProviderLayerId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ProviderLayerId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ProviderLayerId.parse_obj(obj) + return cls.model_validate(obj) - _obj = ProviderLayerId.parse_obj({ - "layer_id": obj.get("layerId"), - "provider_id": obj.get("providerId") + _obj = cls.model_validate({ + "layerId": obj.get("layerId"), + "providerId": obj.get("providerId") }) return _obj diff --git a/python/geoengine_openapi_client/models/query_rectangle.py b/python/geoengine_openapi_client/models/query_rectangle.py new file mode 100644 index 00000000..077561cf --- /dev/null +++ b/python/geoengine_openapi_client/models/query_rectangle.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from geoengine_openapi_client.models.spatial_partition2_d import SpatialPartition2D +from geoengine_openapi_client.models.spatial_resolution import SpatialResolution +from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self + +class QueryRectangle(BaseModel): + """ + A spatio-temporal rectangle with a specified resolution + """ # noqa: E501 + spatial_bounds: SpatialPartition2D = Field(alias="spatialBounds") + spatial_resolution: SpatialResolution = Field(alias="spatialResolution") + time_interval: TimeInterval = Field(alias="timeInterval") + __properties: ClassVar[List[str]] = ["spatialBounds", "spatialResolution", "timeInterval"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of QueryRectangle from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of spatial_bounds + if self.spatial_bounds: + _dict['spatialBounds'] = self.spatial_bounds.to_dict() + # override the default output from pydantic by calling `to_dict()` of spatial_resolution + if self.spatial_resolution: + _dict['spatialResolution'] = self.spatial_resolution.to_dict() + # override the default output from pydantic by calling `to_dict()` of time_interval + if self.time_interval: + _dict['timeInterval'] = self.time_interval.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of QueryRectangle from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "spatialBounds": SpatialPartition2D.from_dict(obj["spatialBounds"]) if obj.get("spatialBounds") is not None else None, + "spatialResolution": SpatialResolution.from_dict(obj["spatialResolution"]) if obj.get("spatialResolution") is not None else None, + "timeInterval": TimeInterval.from_dict(obj["timeInterval"]) if obj.get("timeInterval") is not None else None + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/quota.py b/python/geoengine_openapi_client/models/quota.py index b3e6880c..0894165a 100644 --- a/python/geoengine_openapi_client/models/quota.py +++ b/python/geoengine_openapi_client/models/quota.py @@ -18,54 +18,71 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictInt, conint +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class Quota(BaseModel): """ Quota - """ - available: StrictInt = Field(...) - used: conint(strict=True, ge=0) = Field(...) - __properties = ["available", "used"] + """ # noqa: E501 + available: StrictInt + used: Annotated[int, Field(strict=True, ge=0)] + __properties: ClassVar[List[str]] = ["available", "used"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Quota: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Quota from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Quota: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Quota from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Quota.parse_obj(obj) + return cls.model_validate(obj) - _obj = Quota.parse_obj({ + _obj = cls.model_validate({ "available": obj.get("available"), "used": obj.get("used") }) diff --git a/python/geoengine_openapi_client/models/raster_band_descriptor.py b/python/geoengine_openapi_client/models/raster_band_descriptor.py index 2b02ccc9..7210ada6 100644 --- a/python/geoengine_openapi_client/models/raster_band_descriptor.py +++ b/python/geoengine_openapi_client/models/raster_band_descriptor.py @@ -18,59 +18,75 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.measurement import Measurement +from typing import Optional, Set +from typing_extensions import Self class RasterBandDescriptor(BaseModel): """ RasterBandDescriptor - """ - measurement: Measurement = Field(...) - name: StrictStr = Field(...) - __properties = ["measurement", "name"] + """ # noqa: E501 + measurement: Measurement + name: StrictStr + __properties: ClassVar[List[str]] = ["measurement", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterBandDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterBandDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
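The Quota model above swaps v1's `conint(strict=True, ge=0)` for an `Annotated` type; a small sketch of the constraint in action (values invented):

from pydantic import ValidationError
from geoengine_openapi_client.models.quota import Quota

q = Quota(available=100, used=42)      # ok: `used` must be a non-negative int
try:
    Quota(available=100, used=-1)      # the ge=0 bound now lives in Annotated[...]
except ValidationError as err:
    print(err.errors()[0]["type"])     # e.g. "greater_than_equal"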
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of measurement if self.measurement: _dict['measurement'] = self.measurement.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterBandDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterBandDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterBandDescriptor.parse_obj(obj) + return cls.model_validate(obj) - _obj = RasterBandDescriptor.parse_obj({ - "measurement": Measurement.from_dict(obj.get("measurement")) if obj.get("measurement") is not None else None, + _obj = cls.model_validate({ + "measurement": Measurement.from_dict(obj["measurement"]) if obj.get("measurement") is not None else None, "name": obj.get("name") }) return _obj diff --git a/python/geoengine_openapi_client/models/raster_colorizer.py b/python/geoengine_openapi_client/models/raster_colorizer.py index e506a151..d9f061d2 100644 --- a/python/geoengine_openapi_client/models/raster_colorizer.py +++ b/python/geoengine_openapi_client/models/raster_colorizer.py @@ -14,17 +14,15 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.multi_band_raster_colorizer import MultiBandRasterColorizer from geoengine_openapi_client.models.single_band_raster_colorizer import SingleBandRasterColorizer -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self RASTERCOLORIZER_ONE_OF_SCHEMAS = ["MultiBandRasterColorizer", "SingleBandRasterColorizer"] @@ -36,16 +34,16 @@ class RasterColorizer(BaseModel): oneof_schema_1_validator: Optional[SingleBandRasterColorizer] = None # data type: MultiBandRasterColorizer oneof_schema_2_validator: Optional[MultiBandRasterColorizer] = None - if TYPE_CHECKING: - actual_instance: Union[MultiBandRasterColorizer, SingleBandRasterColorizer] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(RASTERCOLORIZER_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[MultiBandRasterColorizer, SingleBandRasterColorizer]] = None + one_of_schemas: Set[str] = { "MultiBandRasterColorizer", "SingleBandRasterColorizer" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -58,9 +56,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = RasterColorizer.construct() + instance = RasterColorizer.model_construct() error_messages = [] match = 0 # validate data type: SingleBandRasterColorizer @@ -83,13 +81,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> RasterColorizer: + def from_dict(cls, obj: 
Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> RasterColorizer: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = RasterColorizer.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -99,22 +97,22 @@ def from_json(cls, json_str: str) -> RasterColorizer: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `MultiBandRasterColorizer` - if _data_type == "MultiBandRasterColorizer": + if _data_type == "multiBand": instance.actual_instance = MultiBandRasterColorizer.from_json(json_str) return instance # check if data type is `SingleBandRasterColorizer` - if _data_type == "SingleBandRasterColorizer": + if _data_type == "singleBand": instance.actual_instance = SingleBandRasterColorizer.from_json(json_str) return instance # check if data type is `MultiBandRasterColorizer` - if _data_type == "multiBand": + if _data_type == "MultiBandRasterColorizer": instance.actual_instance = MultiBandRasterColorizer.from_json(json_str) return instance # check if data type is `SingleBandRasterColorizer` - if _data_type == "singleBand": + if _data_type == "SingleBandRasterColorizer": instance.actual_instance = SingleBandRasterColorizer.from_json(json_str) return instance @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], MultiBandRasterColorizer, SingleBandRasterColorizer]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/raster_data_type.py b/python/geoengine_openapi_client/models/raster_data_type.py index f3ec3610..b6d0d198 100644 --- a/python/geoengine_openapi_client/models/raster_data_type.py +++ b/python/geoengine_openapi_client/models/raster_data_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class RasterDataType(str, Enum): @@ -42,8 +39,8 @@ class RasterDataType(str, Enum): F64 = 'F64' @classmethod - def from_json(cls, json_str: str) -> RasterDataType: + def from_json(cls, json_str: str) -> Self: """Create an instance of RasterDataType from a JSON string""" - return RasterDataType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/raster_dataset_from_workflow.py b/python/geoengine_openapi_client/models/raster_dataset_from_workflow.py index d65b57ce..2d4fb84e 100644 --- 
a/python/geoengine_openapi_client/models/raster_dataset_from_workflow.py +++ b/python/geoengine_openapi_client/models/raster_dataset_from_workflow.py @@ -18,78 +18,87 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictBool, StrictStr -from geoengine_openapi_client.models.raster_query_rectangle import RasterQueryRectangle +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from geoengine_openapi_client.models.query_rectangle import QueryRectangle +from typing import Optional, Set +from typing_extensions import Self class RasterDatasetFromWorkflow(BaseModel): """ - parameter for the dataset from workflow handler (body) # noqa: E501 - """ - as_cog: Optional[StrictBool] = Field(True, alias="asCog") + parameter for the dataset from workflow handler (body) + """ # noqa: E501 + as_cog: Optional[StrictBool] = Field(default=True, alias="asCog") description: Optional[StrictStr] = None - display_name: StrictStr = Field(..., alias="displayName") + display_name: StrictStr = Field(alias="displayName") name: Optional[StrictStr] = None - query: RasterQueryRectangle = Field(...) - __properties = ["asCog", "description", "displayName", "name", "query"] + query: QueryRectangle + __properties: ClassVar[List[str]] = ["asCog", "description", "displayName", "name", "query"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterDatasetFromWorkflow: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterDatasetFromWorkflow from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True, - # Note: remove as_cog when set to default - exclude_defaults=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
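A sketch of building the request body defined above; `qr` is assumed to be a QueryRectangle instance, for example parsed as in the QueryRectangle sketch earlier, and the display name is invented:

from geoengine_openapi_client.models.raster_dataset_from_workflow import RasterDatasetFromWorkflow

body = RasterDatasetFromWorkflow(displayName="NDVI 2024", query=qr)
d = body.to_dict()

assert d["asCog"] is True    # default is kept; the new to_dict no longer passes exclude_defaults
assert "name" not in d       # unset optional fields are dropped via exclude_none
assert "displayName" in d and "query" in d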
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of query if self.query: _dict['query'] = self.query.to_dict() # set to None if description (nullable) is None - # and __fields_set__ contains the field - if self.description is None and "description" in self.__fields_set__: + # and model_fields_set contains the field + if self.description is None and "description" in self.model_fields_set: _dict['description'] = None - # set to None if name (nullable) is None - # and __fields_set__ contains the field - if self.name is None and "name" in self.__fields_set__: - _dict['name'] = None - return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterDatasetFromWorkflow: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterDatasetFromWorkflow from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterDatasetFromWorkflow.parse_obj(obj) + return cls.model_validate(obj) - _obj = RasterDatasetFromWorkflow.parse_obj({ - "as_cog": obj.get("asCog") if obj.get("asCog") is not None else True, + _obj = cls.model_validate({ + "asCog": obj.get("asCog") if obj.get("asCog") is not None else True, "description": obj.get("description"), - "display_name": obj.get("displayName"), + "displayName": obj.get("displayName"), "name": obj.get("name"), - "query": RasterQueryRectangle.from_dict(obj.get("query")) if obj.get("query") is not None else None + "query": QueryRectangle.from_dict(obj["query"]) if obj.get("query") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/raster_dataset_from_workflow_result.py b/python/geoengine_openapi_client/models/raster_dataset_from_workflow_result.py index cc3f962c..418199a8 100644 --- a/python/geoengine_openapi_client/models/raster_dataset_from_workflow_result.py +++ b/python/geoengine_openapi_client/models/raster_dataset_from_workflow_result.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class RasterDatasetFromWorkflowResult(BaseModel): """ - response of the dataset from workflow handler # noqa: E501 - """ - dataset: StrictStr = Field(...) - upload: StrictStr = Field(...) 
- __properties = ["dataset", "upload"] + response of the dataset from workflow handler + """ # noqa: E501 + dataset: StrictStr + upload: StrictStr + __properties: ClassVar[List[str]] = ["dataset", "upload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterDatasetFromWorkflowResult: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterDatasetFromWorkflowResult from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterDatasetFromWorkflowResult: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterDatasetFromWorkflowResult from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterDatasetFromWorkflowResult.parse_obj(obj) + return cls.model_validate(obj) - _obj = RasterDatasetFromWorkflowResult.parse_obj({ + _obj = cls.model_validate({ "dataset": obj.get("dataset"), "upload": obj.get("upload") }) diff --git a/python/geoengine_openapi_client/models/raster_properties_entry_type.py b/python/geoengine_openapi_client/models/raster_properties_entry_type.py index 44a49ab9..ffe87d2f 100644 --- a/python/geoengine_openapi_client/models/raster_properties_entry_type.py +++ b/python/geoengine_openapi_client/models/raster_properties_entry_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class RasterPropertiesEntryType(str, Enum): @@ -34,8 +31,8 @@ class RasterPropertiesEntryType(str, Enum): STRING = 'String' @classmethod - def from_json(cls, json_str: str) -> RasterPropertiesEntryType: + def from_json(cls, json_str: str) -> Self: """Create an instance of RasterPropertiesEntryType from a JSON string""" - return RasterPropertiesEntryType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/raster_properties_key.py b/python/geoengine_openapi_client/models/raster_properties_key.py index 5d8c235b..772994e1 100644 --- a/python/geoengine_openapi_client/models/raster_properties_key.py +++ 
b/python/geoengine_openapi_client/models/raster_properties_key.py @@ -18,59 +18,75 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class RasterPropertiesKey(BaseModel): """ RasterPropertiesKey - """ + """ # noqa: E501 domain: Optional[StrictStr] = None - key: StrictStr = Field(...) - __properties = ["domain", "key"] + key: StrictStr + __properties: ClassVar[List[str]] = ["domain", "key"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterPropertiesKey: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterPropertiesKey from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
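The docstring above describes the nullable-field handling; RasterPropertiesKey is a convenient model to see it on, since `domain` is nullable (values invented):

from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey

# `domain` left unset: exclude_none drops it and to_dict does not re-add it
assert RasterPropertiesKey(key="author").to_dict() == {"key": "author"}

# `domain` explicitly set to None: it is in model_fields_set, so to_dict keeps it
assert RasterPropertiesKey(key="author", domain=None).to_dict() == {"key": "author", "domain": None}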
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if domain (nullable) is None - # and __fields_set__ contains the field - if self.domain is None and "domain" in self.__fields_set__: + # and model_fields_set contains the field + if self.domain is None and "domain" in self.model_fields_set: _dict['domain'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterPropertiesKey: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterPropertiesKey from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterPropertiesKey.parse_obj(obj) + return cls.model_validate(obj) - _obj = RasterPropertiesKey.parse_obj({ + _obj = cls.model_validate({ "domain": obj.get("domain"), "key": obj.get("key") }) diff --git a/python/geoengine_openapi_client/models/raster_result_descriptor.py b/python/geoengine_openapi_client/models/raster_result_descriptor.py index 3947a16c..ebd3a694 100644 --- a/python/geoengine_openapi_client/models/raster_result_descriptor.py +++ b/python/geoengine_openapi_client/models/raster_result_descriptor.py @@ -18,57 +18,73 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor from geoengine_openapi_client.models.raster_data_type import RasterDataType from geoengine_openapi_client.models.spatial_partition2_d import SpatialPartition2D from geoengine_openapi_client.models.spatial_resolution import SpatialResolution from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class RasterResultDescriptor(BaseModel): """ - A `ResultDescriptor` for raster queries # noqa: E501 - """ - bands: conlist(RasterBandDescriptor) = Field(...) 
+ A `ResultDescriptor` for raster queries + """ # noqa: E501 + bands: List[RasterBandDescriptor] bbox: Optional[SpatialPartition2D] = None - data_type: RasterDataType = Field(..., alias="dataType") + data_type: RasterDataType = Field(alias="dataType") resolution: Optional[SpatialResolution] = None - spatial_reference: StrictStr = Field(..., alias="spatialReference") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - __properties = ["bands", "bbox", "dataType", "resolution", "spatialReference", "time"] + __properties: ClassVar[List[str]] = ["bands", "bbox", "dataType", "resolution", "spatialReference", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in bands (list) _items = [] if self.bands: - for _item in self.bands: - if _item: - _items.append(_item.to_dict()) + for _item_bands in self.bands: + if _item_bands: + _items.append(_item_bands.to_dict()) _dict['bands'] = _items # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: @@ -80,38 +96,38 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if resolution (nullable) is None - # and __fields_set__ contains the field - if self.resolution is None and "resolution" in self.__fields_set__: + # and model_fields_set contains the field + if self.resolution is None and "resolution" in self.model_fields_set: _dict['resolution'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterResultDescriptor.parse_obj(obj) - - _obj = RasterResultDescriptor.parse_obj({ - "bands": [RasterBandDescriptor.from_dict(_item) for _item in obj.get("bands")] if obj.get("bands") is not None else None, - "bbox": SpatialPartition2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, - "data_type": obj.get("dataType"), - "resolution": SpatialResolution.from_dict(obj.get("resolution")) if obj.get("resolution") is not None else None, - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bands": [RasterBandDescriptor.from_dict(_item) for _item in obj["bands"]] if obj.get("bands") is not None else None, + "bbox": SpatialPartition2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, + "dataType": obj.get("dataType"), + "resolution": SpatialResolution.from_dict(obj["resolution"]) if obj.get("resolution") is not None else None, + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/raster_stream_websocket_result_type.py b/python/geoengine_openapi_client/models/raster_stream_websocket_result_type.py index de617a26..27e9af1f 100644 --- a/python/geoengine_openapi_client/models/raster_stream_websocket_result_type.py +++ b/python/geoengine_openapi_client/models/raster_stream_websocket_result_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class 
RasterStreamWebsocketResultType(str, Enum): @@ -33,8 +30,8 @@ class RasterStreamWebsocketResultType(str, Enum): ARROW = 'arrow' @classmethod - def from_json(cls, json_str: str) -> RasterStreamWebsocketResultType: + def from_json(cls, json_str: str) -> Self: """Create an instance of RasterStreamWebsocketResultType from a JSON string""" - return RasterStreamWebsocketResultType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/raster_symbology.py b/python/geoengine_openapi_client/models/raster_symbology.py index d3b83553..5cb39404 100644 --- a/python/geoengine_openapi_client/models/raster_symbology.py +++ b/python/geoengine_openapi_client/models/raster_symbology.py @@ -18,68 +18,84 @@ import re # noqa: F401 import json - -from typing import Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Union from geoengine_openapi_client.models.raster_colorizer import RasterColorizer +from typing import Optional, Set +from typing_extensions import Self class RasterSymbology(BaseModel): """ RasterSymbology - """ - opacity: Union[StrictFloat, StrictInt] = Field(...) - raster_colorizer: RasterColorizer = Field(..., alias="rasterColorizer") - type: StrictStr = Field(...) - __properties = ["opacity", "rasterColorizer", "type"] + """ # noqa: E501 + opacity: Union[StrictFloat, StrictInt] + raster_colorizer: RasterColorizer = Field(alias="rasterColorizer") + type: StrictStr + __properties: ClassVar[List[str]] = ["opacity", "rasterColorizer", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('raster', 'point', 'line', 'polygon'): - raise ValueError("must be one of enum values ('raster', 'point', 'line', 'polygon')") + if value not in set(['raster']): + raise ValueError("must be one of enum values ('raster')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RasterSymbology: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RasterSymbology from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
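The enum modules in this patch (RasterDataType, RasterPropertiesEntryType, RasterStreamWebsocketResultType, ...) all move from `aenum` to the standard library `Enum`; `from_json` still just feeds the decoded JSON scalar to the enum constructor:

from geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType

result_type = RasterStreamWebsocketResultType.from_json('"arrow"')
assert result_type is RasterStreamWebsocketResultType.ARROW
assert result_type.value == 'arrow'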
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of raster_colorizer if self.raster_colorizer: _dict['rasterColorizer'] = self.raster_colorizer.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> RasterSymbology: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RasterSymbology from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RasterSymbology.parse_obj(obj) + return cls.model_validate(obj) - _obj = RasterSymbology.parse_obj({ + _obj = cls.model_validate({ "opacity": obj.get("opacity"), - "raster_colorizer": RasterColorizer.from_dict(obj.get("rasterColorizer")) if obj.get("rasterColorizer") is not None else None, + "rasterColorizer": RasterColorizer.from_dict(obj["rasterColorizer"]) if obj.get("rasterColorizer") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/resource.py b/python/geoengine_openapi_client/models/resource.py index 85c8a0f5..945c16a1 100644 --- a/python/geoengine_openapi_client/models/resource.py +++ b/python/geoengine_openapi_client/models/resource.py @@ -14,20 +14,18 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.dataset_resource import DatasetResource from geoengine_openapi_client.models.layer_collection_resource import LayerCollectionResource from geoengine_openapi_client.models.layer_resource import LayerResource from geoengine_openapi_client.models.ml_model_resource import MlModelResource from geoengine_openapi_client.models.project_resource import ProjectResource -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self RESOURCE_ONE_OF_SCHEMAS = ["DatasetResource", "LayerCollectionResource", "LayerResource", "MlModelResource", "ProjectResource"] @@ -45,16 +43,16 @@ class Resource(BaseModel): oneof_schema_4_validator: Optional[DatasetResource] = None # data type: MlModelResource oneof_schema_5_validator: Optional[MlModelResource] = None - if TYPE_CHECKING: - actual_instance: Union[DatasetResource, LayerCollectionResource, LayerResource, MlModelResource, ProjectResource] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(RESOURCE_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[DatasetResource, LayerCollectionResource, LayerResource, MlModelResource, ProjectResource]] = None + one_of_schemas: Set[str] = { "DatasetResource", "LayerCollectionResource", "LayerResource", "MlModelResource", "ProjectResource" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -67,9 +65,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def 
actual_instance_must_validate_oneof(cls, v): - instance = Resource.construct() + instance = Resource.model_construct() error_messages = [] match = 0 # validate data type: LayerResource @@ -107,13 +105,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> Resource: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> Resource: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = Resource.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -123,52 +121,52 @@ def from_json(cls, json_str: str) -> Resource: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `DatasetResource` - if _data_type == "DatasetResource": + if _data_type == "dataset": instance.actual_instance = DatasetResource.from_json(json_str) return instance - # check if data type is `LayerCollectionResource` - if _data_type == "LayerCollectionResource": - instance.actual_instance = LayerCollectionResource.from_json(json_str) - return instance - # check if data type is `LayerResource` - if _data_type == "LayerResource": + if _data_type == "layer": instance.actual_instance = LayerResource.from_json(json_str) return instance + # check if data type is `LayerCollectionResource` + if _data_type == "layerCollection": + instance.actual_instance = LayerCollectionResource.from_json(json_str) + return instance + # check if data type is `MlModelResource` - if _data_type == "MlModelResource": + if _data_type == "mlModel": instance.actual_instance = MlModelResource.from_json(json_str) return instance # check if data type is `ProjectResource` - if _data_type == "ProjectResource": + if _data_type == "project": instance.actual_instance = ProjectResource.from_json(json_str) return instance # check if data type is `DatasetResource` - if _data_type == "dataset": + if _data_type == "DatasetResource": instance.actual_instance = DatasetResource.from_json(json_str) return instance - # check if data type is `LayerResource` - if _data_type == "layer": - instance.actual_instance = LayerResource.from_json(json_str) - return instance - # check if data type is `LayerCollectionResource` - if _data_type == "layerCollection": + if _data_type == "LayerCollectionResource": instance.actual_instance = LayerCollectionResource.from_json(json_str) return instance + # check if data type is `LayerResource` + if _data_type == "LayerResource": + instance.actual_instance = LayerResource.from_json(json_str) + return instance + # check if data type is `MlModelResource` - if _data_type == "mlModel": + if _data_type == "MlModelResource": instance.actual_instance = MlModelResource.from_json(json_str) return instance # check if data type is `ProjectResource` - if _data_type == "project": + if _data_type == "ProjectResource": instance.actual_instance = ProjectResource.from_json(json_str) return instance @@ -217,19 +215,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], DatasetResource, LayerCollectionResource, LayerResource, 
MlModelResource, ProjectResource]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -237,6 +233,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/resource_id.py b/python/geoengine_openapi_client/models/resource_id.py index 4ecdc1d6..d3c65100 100644 --- a/python/geoengine_openapi_client/models/resource_id.py +++ b/python/geoengine_openapi_client/models/resource_id.py @@ -14,20 +14,18 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.resource_id_dataset_id import ResourceIdDatasetId from geoengine_openapi_client.models.resource_id_layer import ResourceIdLayer from geoengine_openapi_client.models.resource_id_layer_collection import ResourceIdLayerCollection from geoengine_openapi_client.models.resource_id_ml_model import ResourceIdMlModel from geoengine_openapi_client.models.resource_id_project import ResourceIdProject -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self RESOURCEID_ONE_OF_SCHEMAS = ["ResourceIdDatasetId", "ResourceIdLayer", "ResourceIdLayerCollection", "ResourceIdMlModel", "ResourceIdProject"] @@ -45,16 +43,16 @@ class ResourceId(BaseModel): oneof_schema_4_validator: Optional[ResourceIdDatasetId] = None # data type: ResourceIdMlModel oneof_schema_5_validator: Optional[ResourceIdMlModel] = None - if TYPE_CHECKING: - actual_instance: Union[ResourceIdDatasetId, ResourceIdLayer, ResourceIdLayerCollection, ResourceIdMlModel, ResourceIdProject] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(RESOURCEID_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[ResourceIdDatasetId, ResourceIdLayer, ResourceIdLayerCollection, ResourceIdMlModel, ResourceIdProject]] = None + one_of_schemas: Set[str] = { "ResourceIdDatasetId", "ResourceIdLayer", "ResourceIdLayerCollection", "ResourceIdMlModel", "ResourceIdProject" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -67,9 +65,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = ResourceId.construct() + instance = ResourceId.model_construct() error_messages = [] match = 0 # validate data type: ResourceIdLayer @@ -107,13 +105,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> ResourceId: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) 
-> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> ResourceId: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = ResourceId.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -217,19 +215,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], ResourceIdDatasetId, ResourceIdLayer, ResourceIdLayerCollection, ResourceIdMlModel, ResourceIdProject]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -237,6 +233,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/resource_id_dataset_id.py b/python/geoengine_openapi_client/models/resource_id_dataset_id.py index e877e979..a7569284 100644 --- a/python/geoengine_openapi_client/models/resource_id_dataset_id.py +++ b/python/geoengine_openapi_client/models/resource_id_dataset_id.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ResourceIdDatasetId(BaseModel): """ ResourceIdDatasetId - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) 
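The oneOf wrappers in this patch (RasterColorizer, Resource, ResourceId) all dispatch `from_json` on the `type` discriminator, as shown for resource.py above; a sketch using Resource, where the `id` field and its value are assumptions about LayerResource's schema rather than something visible in this patch:

import json
from geoengine_openapi_client.models.resource import Resource

raw = json.dumps({"type": "layer", "id": "my-layer"})   # "layer" maps to LayerResource in the dispatch above
res = Resource.from_json(raw)
print(type(res.actual_instance).__name__)   # LayerResource
print(res.to_dict())                        # delegates to LayerResource.to_dict()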
- __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('DatasetId'): + if value not in set(['DatasetId']): raise ValueError("must be one of enum values ('DatasetId')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ResourceIdDatasetId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ResourceIdDatasetId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ResourceIdDatasetId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ResourceIdDatasetId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ResourceIdDatasetId.parse_obj(obj) + return cls.model_validate(obj) - _obj = ResourceIdDatasetId.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/resource_id_layer.py b/python/geoengine_openapi_client/models/resource_id_layer.py index e2f90be2..8522d767 100644 --- a/python/geoengine_openapi_client/models/resource_id_layer.py +++ b/python/geoengine_openapi_client/models/resource_id_layer.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ResourceIdLayer(BaseModel): """ ResourceIdLayer - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) 
- __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('Layer', 'LayerCollection', 'Project', 'DatasetId', 'MlModel'): - raise ValueError("must be one of enum values ('Layer', 'LayerCollection', 'Project', 'DatasetId', 'MlModel')") + if value not in set(['Layer']): + raise ValueError("must be one of enum values ('Layer')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ResourceIdLayer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ResourceIdLayer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ResourceIdLayer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ResourceIdLayer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ResourceIdLayer.parse_obj(obj) + return cls.model_validate(obj) - _obj = ResourceIdLayer.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/resource_id_layer_collection.py b/python/geoengine_openapi_client/models/resource_id_layer_collection.py index e0097de3..045fcb6d 100644 --- a/python/geoengine_openapi_client/models/resource_id_layer_collection.py +++ b/python/geoengine_openapi_client/models/resource_id_layer_collection.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ResourceIdLayerCollection(BaseModel): """ ResourceIdLayerCollection - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) 
- __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('LayerCollection'): + if value not in set(['LayerCollection']): raise ValueError("must be one of enum values ('LayerCollection')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ResourceIdLayerCollection: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ResourceIdLayerCollection from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ResourceIdLayerCollection: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ResourceIdLayerCollection from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ResourceIdLayerCollection.parse_obj(obj) + return cls.model_validate(obj) - _obj = ResourceIdLayerCollection.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/resource_id_ml_model.py b/python/geoengine_openapi_client/models/resource_id_ml_model.py index 065318dc..425a99b4 100644 --- a/python/geoengine_openapi_client/models/resource_id_ml_model.py +++ b/python/geoengine_openapi_client/models/resource_id_ml_model.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ResourceIdMlModel(BaseModel): """ ResourceIdMlModel - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) 
- __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('MlModel'): + if value not in set(['MlModel']): raise ValueError("must be one of enum values ('MlModel')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ResourceIdMlModel: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ResourceIdMlModel from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ResourceIdMlModel: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ResourceIdMlModel from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ResourceIdMlModel.parse_obj(obj) + return cls.model_validate(obj) - _obj = ResourceIdMlModel.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/resource_id_project.py b/python/geoengine_openapi_client/models/resource_id_project.py index 76a5a19c..f295382e 100644 --- a/python/geoengine_openapi_client/models/resource_id_project.py +++ b/python/geoengine_openapi_client/models/resource_id_project.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ResourceIdProject(BaseModel): """ ResourceIdProject - """ - id: StrictStr = Field(...) - type: StrictStr = Field(...) 
- __properties = ["id", "type"] + """ # noqa: E501 + id: StrictStr + type: StrictStr + __properties: ClassVar[List[str]] = ["id", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('Project'): + if value not in set(['Project']): raise ValueError("must be one of enum values ('Project')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ResourceIdProject: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ResourceIdProject from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ResourceIdProject: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ResourceIdProject from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ResourceIdProject.parse_obj(obj) + return cls.model_validate(obj) - _obj = ResourceIdProject.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "type": obj.get("type") }) diff --git a/python/geoengine_openapi_client/models/role.py b/python/geoengine_openapi_client/models/role.py index d6723964..ea00a20b 100644 --- a/python/geoengine_openapi_client/models/role.py +++ b/python/geoengine_openapi_client/models/role.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class Role(BaseModel): """ Role - """ - id: StrictStr = Field(...) - name: StrictStr = Field(...) 
- __properties = ["id", "name"] + """ # noqa: E501 + id: StrictStr + name: StrictStr + __properties: ClassVar[List[str]] = ["id", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Role: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Role from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Role: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Role from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Role.parse_obj(obj) + return cls.model_validate(obj) - _obj = Role.parse_obj({ + _obj = cls.model_validate({ "id": obj.get("id"), "name": obj.get("name") }) diff --git a/python/geoengine_openapi_client/models/role_description.py b/python/geoengine_openapi_client/models/role_description.py index 8e9d8b35..2836111f 100644 --- a/python/geoengine_openapi_client/models/role_description.py +++ b/python/geoengine_openapi_client/models/role_description.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictBool +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.role import Role +from typing import Optional, Set +from typing_extensions import Self class RoleDescription(BaseModel): """ RoleDescription - """ - individual: StrictBool = Field(...) - role: Role = Field(...) 
- __properties = ["individual", "role"] + """ # noqa: E501 + individual: StrictBool + role: Role + __properties: ClassVar[List[str]] = ["individual", "role"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> RoleDescription: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of RoleDescription from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of role if self.role: _dict['role'] = self.role.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> RoleDescription: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of RoleDescription from a dict""" if obj is None: return None if not isinstance(obj, dict): - return RoleDescription.parse_obj(obj) + return cls.model_validate(obj) - _obj = RoleDescription.parse_obj({ + _obj = cls.model_validate({ "individual": obj.get("individual"), - "role": Role.from_dict(obj.get("role")) if obj.get("role") is not None else None + "role": Role.from_dict(obj["role"]) if obj.get("role") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/search_capabilities.py b/python/geoengine_openapi_client/models/search_capabilities.py index e4b6ba5a..e2d80aaa 100644 --- a/python/geoengine_openapi_client/models/search_capabilities.py +++ b/python/geoengine_openapi_client/models/search_capabilities.py @@ -18,67 +18,83 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictBool, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.search_types import SearchTypes +from typing import Optional, Set +from typing_extensions import Self class SearchCapabilities(BaseModel): """ SearchCapabilities - """ - autocomplete: StrictBool = Field(...) 
- filters: Optional[conlist(StrictStr)] = None - search_types: SearchTypes = Field(..., alias="searchTypes") - __properties = ["autocomplete", "filters", "searchTypes"] + """ # noqa: E501 + autocomplete: StrictBool + filters: Optional[List[StrictStr]] = None + search_types: SearchTypes = Field(alias="searchTypes") + __properties: ClassVar[List[str]] = ["autocomplete", "filters", "searchTypes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SearchCapabilities: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SearchCapabilities from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of search_types if self.search_types: _dict['searchTypes'] = self.search_types.to_dict() # set to None if filters (nullable) is None - # and __fields_set__ contains the field - if self.filters is None and "filters" in self.__fields_set__: + # and model_fields_set contains the field + if self.filters is None and "filters" in self.model_fields_set: _dict['filters'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> SearchCapabilities: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SearchCapabilities from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SearchCapabilities.parse_obj(obj) + return cls.model_validate(obj) - _obj = SearchCapabilities.parse_obj({ + _obj = cls.model_validate({ "autocomplete": obj.get("autocomplete"), "filters": obj.get("filters"), - "search_types": SearchTypes.from_dict(obj.get("searchTypes")) if obj.get("searchTypes") is not None else None + "searchTypes": SearchTypes.from_dict(obj["searchTypes"]) if obj.get("searchTypes") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/search_type.py b/python/geoengine_openapi_client/models/search_type.py index c4b7683d..3a608b52 100644 --- a/python/geoengine_openapi_client/models/search_type.py +++ b/python/geoengine_openapi_client/models/search_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class SearchType(str, Enum): @@ -34,8 +31,8 @@ class SearchType(str, Enum): PREFIX = 'prefix' @classmethod - def from_json(cls, json_str: str) -> SearchType: + def from_json(cls, json_str: str) -> Self: """Create an instance of SearchType from a JSON string""" - return SearchType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/search_types.py b/python/geoengine_openapi_client/models/search_types.py index e5d8e9a6..1571ad05 100644 --- a/python/geoengine_openapi_client/models/search_types.py +++ b/python/geoengine_openapi_client/models/search_types.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictBool +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class SearchTypes(BaseModel): """ SearchTypes - """ - fulltext: StrictBool = Field(...) - prefix: StrictBool = Field(...) 
- __properties = ["fulltext", "prefix"] + """ # noqa: E501 + fulltext: StrictBool + prefix: StrictBool + __properties: ClassVar[List[str]] = ["fulltext", "prefix"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SearchTypes: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SearchTypes from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> SearchTypes: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SearchTypes from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SearchTypes.parse_obj(obj) + return cls.model_validate(obj) - _obj = SearchTypes.parse_obj({ + _obj = cls.model_validate({ "fulltext": obj.get("fulltext"), "prefix": obj.get("prefix") }) diff --git a/python/geoengine_openapi_client/models/server_info.py b/python/geoengine_openapi_client/models/server_info.py index 7c706fca..98818a9d 100644 --- a/python/geoengine_openapi_client/models/server_info.py +++ b/python/geoengine_openapi_client/models/server_info.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class ServerInfo(BaseModel): """ ServerInfo - """ - build_date: StrictStr = Field(..., alias="buildDate") - commit_hash: StrictStr = Field(..., alias="commitHash") - features: StrictStr = Field(...) - version: StrictStr = Field(...) 
- __properties = ["buildDate", "commitHash", "features", "version"] + """ # noqa: E501 + build_date: StrictStr = Field(alias="buildDate") + commit_hash: StrictStr = Field(alias="commitHash") + features: StrictStr + version: StrictStr + __properties: ClassVar[List[str]] = ["buildDate", "commitHash", "features", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> ServerInfo: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of ServerInfo from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> ServerInfo: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of ServerInfo from a dict""" if obj is None: return None if not isinstance(obj, dict): - return ServerInfo.parse_obj(obj) + return cls.model_validate(obj) - _obj = ServerInfo.parse_obj({ - "build_date": obj.get("buildDate"), - "commit_hash": obj.get("commitHash"), + _obj = cls.model_validate({ + "buildDate": obj.get("buildDate"), + "commitHash": obj.get("commitHash"), "features": obj.get("features"), "version": obj.get("version") }) diff --git a/python/geoengine_openapi_client/models/single_band_raster_colorizer.py b/python/geoengine_openapi_client/models/single_band_raster_colorizer.py index 69054635..98a6e928 100644 --- a/python/geoengine_openapi_client/models/single_band_raster_colorizer.py +++ b/python/geoengine_openapi_client/models/single_band_raster_colorizer.py @@ -18,68 +18,85 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.colorizer import Colorizer +from typing import Optional, Set +from typing_extensions import Self class SingleBandRasterColorizer(BaseModel): """ SingleBandRasterColorizer - """ - band: conint(strict=True, ge=0) = Field(...) - band_colorizer: Colorizer = Field(..., alias="bandColorizer") - type: StrictStr = Field(...) 
- __properties = ["band", "bandColorizer", "type"] + """ # noqa: E501 + band: Annotated[int, Field(strict=True, ge=0)] + band_colorizer: Colorizer = Field(alias="bandColorizer") + type: StrictStr + __properties: ClassVar[List[str]] = ["band", "bandColorizer", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('singleBand', 'multiBand'): - raise ValueError("must be one of enum values ('singleBand', 'multiBand')") + if value not in set(['singleBand']): + raise ValueError("must be one of enum values ('singleBand')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SingleBandRasterColorizer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SingleBandRasterColorizer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of band_colorizer if self.band_colorizer: _dict['bandColorizer'] = self.band_colorizer.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> SingleBandRasterColorizer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SingleBandRasterColorizer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SingleBandRasterColorizer.parse_obj(obj) + return cls.model_validate(obj) - _obj = SingleBandRasterColorizer.parse_obj({ + _obj = cls.model_validate({ "band": obj.get("band"), - "band_colorizer": Colorizer.from_dict(obj.get("bandColorizer")) if obj.get("bandColorizer") is not None else None, + "bandColorizer": Colorizer.from_dict(obj["bandColorizer"]) if obj.get("bandColorizer") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/spatial_partition2_d.py b/python/geoengine_openapi_client/models/spatial_partition2_d.py index aa6518fc..d853e7f8 100644 --- a/python/geoengine_openapi_client/models/spatial_partition2_d.py +++ b/python/geoengine_openapi_client/models/spatial_partition2_d.py @@ -18,43 +18,59 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.coordinate2_d import Coordinate2D +from typing import Optional, Set +from typing_extensions import Self class SpatialPartition2D(BaseModel): """ - A partition of space that include the upper left but excludes the lower right coordinate # noqa: E501 - """ - lower_right_coordinate: Coordinate2D = Field(..., alias="lowerRightCoordinate") - upper_left_coordinate: Coordinate2D = Field(..., alias="upperLeftCoordinate") - __properties = ["lowerRightCoordinate", "upperLeftCoordinate"] + A partition of space that include the upper left but excludes the lower right coordinate + """ # noqa: E501 + lower_right_coordinate: Coordinate2D = Field(alias="lowerRightCoordinate") + upper_left_coordinate: Coordinate2D = Field(alias="upperLeftCoordinate") + __properties: ClassVar[List[str]] = ["lowerRightCoordinate", "upperLeftCoordinate"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SpatialPartition2D: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SpatialPartition2D from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model 
using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of lower_right_coordinate if self.lower_right_coordinate: _dict['lowerRightCoordinate'] = self.lower_right_coordinate.to_dict() @@ -64,17 +80,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> SpatialPartition2D: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SpatialPartition2D from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SpatialPartition2D.parse_obj(obj) + return cls.model_validate(obj) - _obj = SpatialPartition2D.parse_obj({ - "lower_right_coordinate": Coordinate2D.from_dict(obj.get("lowerRightCoordinate")) if obj.get("lowerRightCoordinate") is not None else None, - "upper_left_coordinate": Coordinate2D.from_dict(obj.get("upperLeftCoordinate")) if obj.get("upperLeftCoordinate") is not None else None + _obj = cls.model_validate({ + "lowerRightCoordinate": Coordinate2D.from_dict(obj["lowerRightCoordinate"]) if obj.get("lowerRightCoordinate") is not None else None, + "upperLeftCoordinate": Coordinate2D.from_dict(obj["upperLeftCoordinate"]) if obj.get("upperLeftCoordinate") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/spatial_reference_authority.py b/python/geoengine_openapi_client/models/spatial_reference_authority.py index f0445a59..a5719f31 100644 --- a/python/geoengine_openapi_client/models/spatial_reference_authority.py +++ b/python/geoengine_openapi_client/models/spatial_reference_authority.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class SpatialReferenceAuthority(str, Enum): @@ -36,8 +33,8 @@ class SpatialReferenceAuthority(str, Enum): ESRI = 'ESRI' @classmethod - def from_json(cls, json_str: str) -> SpatialReferenceAuthority: + def from_json(cls, json_str: str) -> Self: """Create an instance of SpatialReferenceAuthority from a JSON string""" - return SpatialReferenceAuthority(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/spatial_reference_specification.py b/python/geoengine_openapi_client/models/spatial_reference_specification.py index 29195af5..62d932eb 100644 --- a/python/geoengine_openapi_client/models/spatial_reference_specification.py +++ b/python/geoengine_openapi_client/models/spatial_reference_specification.py @@ -18,79 +18,91 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.axis_order import AxisOrder from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D +from typing import Optional, Set +from typing_extensions import Self class SpatialReferenceSpecification(BaseModel): """ - The specification of a spatial 
reference, where extent and axis labels are given in natural order (x, y) = (east, north) # noqa: E501 - """ - axis_labels: Optional[conlist(StrictStr, max_items=2, min_items=2)] = Field(None, alias="axisLabels") - axis_order: Optional[AxisOrder] = Field(None, alias="axisOrder") - extent: BoundingBox2D = Field(...) - name: StrictStr = Field(...) - proj_string: StrictStr = Field(..., alias="projString") - spatial_reference: StrictStr = Field(..., alias="spatialReference") - __properties = ["axisLabels", "axisOrder", "extent", "name", "projString", "spatialReference"] - - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + The specification of a spatial reference, where extent and axis labels are given in natural order (x, y) = (east, north) + """ # noqa: E501 + axis_labels: Optional[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]] = Field(default=None, alias="axisLabels") + axis_order: Optional[AxisOrder] = Field(default=None, alias="axisOrder") + extent: BoundingBox2D + name: StrictStr + proj_string: StrictStr = Field(alias="projString") + spatial_reference: StrictStr = Field(alias="spatialReference") + __properties: ClassVar[List[str]] = ["axisLabels", "axisOrder", "extent", "name", "projString", "spatialReference"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SpatialReferenceSpecification: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SpatialReferenceSpecification from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of extent if self.extent: _dict['extent'] = self.extent.to_dict() - # set to None if axis_labels (nullable) is None - # and __fields_set__ contains the field - if self.axis_labels is None and "axis_labels" in self.__fields_set__: - _dict['axisLabels'] = None - # set to None if axis_order (nullable) is None - # and __fields_set__ contains the field - if self.axis_order is None and "axis_order" in self.__fields_set__: + # and model_fields_set contains the field + if self.axis_order is None and "axis_order" in self.model_fields_set: _dict['axisOrder'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> SpatialReferenceSpecification: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SpatialReferenceSpecification from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SpatialReferenceSpecification.parse_obj(obj) + return cls.model_validate(obj) - _obj = SpatialReferenceSpecification.parse_obj({ - "axis_labels": obj.get("axisLabels"), - "axis_order": obj.get("axisOrder"), - "extent": BoundingBox2D.from_dict(obj.get("extent")) if obj.get("extent") is not None else None, + _obj = cls.model_validate({ + "axisLabels": obj.get("axisLabels"), + "axisOrder": obj.get("axisOrder"), + "extent": BoundingBox2D.from_dict(obj["extent"]) if obj.get("extent") is not None else None, "name": obj.get("name"), - "proj_string": obj.get("projString"), - "spatial_reference": obj.get("spatialReference") + "projString": obj.get("projString"), + "spatialReference": obj.get("spatialReference") }) return _obj diff --git a/python/geoengine_openapi_client/models/spatial_resolution.py b/python/geoengine_openapi_client/models/spatial_resolution.py index 56f83660..3287be67 100644 --- a/python/geoengine_openapi_client/models/spatial_resolution.py +++ b/python/geoengine_openapi_client/models/spatial_resolution.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - -from typing import Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Union +from typing import Optional, Set +from typing_extensions import Self class SpatialResolution(BaseModel): """ - The spatial resolution in SRS units # noqa: E501 - """ - x: Union[StrictFloat, StrictInt] = Field(...) - y: Union[StrictFloat, StrictInt] = Field(...) 
- __properties = ["x", "y"] + The spatial resolution in SRS units + """ # noqa: E501 + x: Union[StrictFloat, StrictInt] + y: Union[StrictFloat, StrictInt] + __properties: ClassVar[List[str]] = ["x", "y"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SpatialResolution: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SpatialResolution from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> SpatialResolution: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SpatialResolution from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SpatialResolution.parse_obj(obj) + return cls.model_validate(obj) - _obj = SpatialResolution.parse_obj({ + _obj = cls.model_validate({ "x": obj.get("x"), "y": obj.get("y") }) diff --git a/python/geoengine_openapi_client/models/st_rectangle.py b/python/geoengine_openapi_client/models/st_rectangle.py index 54ed6e3e..9f76d559 100644 --- a/python/geoengine_openapi_client/models/st_rectangle.py +++ b/python/geoengine_openapi_client/models/st_rectangle.py @@ -18,45 +18,61 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class STRectangle(BaseModel): """ STRectangle - """ - bounding_box: BoundingBox2D = Field(..., alias="boundingBox") - spatial_reference: StrictStr = Field(..., alias="spatialReference") - time_interval: TimeInterval = Field(..., alias="timeInterval") - __properties = ["boundingBox", "spatialReference", "timeInterval"] + """ # noqa: E501 + bounding_box: BoundingBox2D = Field(alias="boundingBox") + spatial_reference: StrictStr = Field(alias="spatialReference") + time_interval: TimeInterval = Field(alias="timeInterval") + __properties: ClassVar[List[str]] = ["boundingBox", "spatialReference", "timeInterval"] + + model_config = ConfigDict( + 
populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> STRectangle: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of STRectangle from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bounding_box if self.bounding_box: _dict['boundingBox'] = self.bounding_box.to_dict() @@ -66,18 +82,18 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> STRectangle: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of STRectangle from a dict""" if obj is None: return None if not isinstance(obj, dict): - return STRectangle.parse_obj(obj) + return cls.model_validate(obj) - _obj = STRectangle.parse_obj({ - "bounding_box": BoundingBox2D.from_dict(obj.get("boundingBox")) if obj.get("boundingBox") is not None else None, - "spatial_reference": obj.get("spatialReference"), - "time_interval": TimeInterval.from_dict(obj.get("timeInterval")) if obj.get("timeInterval") is not None else None + _obj = cls.model_validate({ + "boundingBox": BoundingBox2D.from_dict(obj["boundingBox"]) if obj.get("boundingBox") is not None else None, + "spatialReference": obj.get("spatialReference"), + "timeInterval": TimeInterval.from_dict(obj["timeInterval"]) if obj.get("timeInterval") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/static_meta_data.py b/python/geoengine_openapi_client/models/static_meta_data.py new file mode 100644 index 00000000..ec84f783 --- /dev/null +++ b/python/geoengine_openapi_client/models/static_meta_data.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from geoengine_openapi_client.models.ogr_source_dataset import OgrSourceDataset +from geoengine_openapi_client.models.vector_result_descriptor import VectorResultDescriptor +from typing import Optional, Set +from typing_extensions import Self + +class StaticMetaData(BaseModel): + """ + StaticMetaData + """ # noqa: E501 + loading_info: OgrSourceDataset = Field(alias="loadingInfo") + result_descriptor: VectorResultDescriptor = Field(alias="resultDescriptor") + type: StrictStr + __properties: ClassVar[List[str]] = ["loadingInfo", "resultDescriptor", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['OgrMetaData']): + raise ValueError("must be one of enum values ('OgrMetaData')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StaticMetaData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of loading_info + if self.loading_info: + _dict['loadingInfo'] = self.loading_info.to_dict() + # override the default output from pydantic by calling `to_dict()` of result_descriptor + if self.result_descriptor: + _dict['resultDescriptor'] = self.result_descriptor.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StaticMetaData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "loadingInfo": OgrSourceDataset.from_dict(obj["loadingInfo"]) if obj.get("loadingInfo") is not None else None, + "resultDescriptor": VectorResultDescriptor.from_dict(obj["resultDescriptor"]) if obj.get("resultDescriptor") is not None else None, + "type": obj.get("type") + }) + return _obj + + diff --git a/python/geoengine_openapi_client/models/static_number_param.py b/python/geoengine_openapi_client/models/static_number_param.py index ad096c79..d7eb4358 100644 --- a/python/geoengine_openapi_client/models/static_number_param.py +++ b/python/geoengine_openapi_client/models/static_number_param.py @@ -18,61 +18,78 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, conint, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class StaticNumberParam(BaseModel): """ StaticNumberParam - """ - type: StrictStr = Field(...) - value: conint(strict=True, ge=0) = Field(...) - __properties = ["type", "value"] + """ # noqa: E501 + type: StrictStr + value: Annotated[int, Field(strict=True, ge=0)] + __properties: ClassVar[List[str]] = ["type", "value"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('static', 'derived'): - raise ValueError("must be one of enum values ('static', 'derived')") + if value not in set(['static']): + raise ValueError("must be one of enum values ('static')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> StaticNumberParam: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of StaticNumberParam from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> StaticNumberParam: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of StaticNumberParam from a dict""" if obj is None: return None if not isinstance(obj, dict): - return StaticNumberParam.parse_obj(obj) + return cls.model_validate(obj) - _obj = StaticNumberParam.parse_obj({ + _obj = cls.model_validate({ "type": obj.get("type"), "value": obj.get("value") }) diff --git a/python/geoengine_openapi_client/models/stroke_param.py b/python/geoengine_openapi_client/models/stroke_param.py index 208624fd..d5e167a1 100644 --- a/python/geoengine_openapi_client/models/stroke_param.py +++ b/python/geoengine_openapi_client/models/stroke_param.py @@ -18,44 +18,60 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.color_param import ColorParam from geoengine_openapi_client.models.number_param import NumberParam +from typing import Optional, Set +from typing_extensions import Self class StrokeParam(BaseModel): """ StrokeParam - """ - color: ColorParam = Field(...) - width: NumberParam = Field(...) - __properties = ["color", "width"] + """ # noqa: E501 + color: ColorParam + width: NumberParam + __properties: ClassVar[List[str]] = ["color", "width"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> StrokeParam: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of StrokeParam from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of color if self.color: _dict['color'] = self.color.to_dict() @@ -65,17 +81,17 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> StrokeParam: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of StrokeParam from a dict""" if obj is None: return None if not isinstance(obj, dict): - return StrokeParam.parse_obj(obj) + return cls.model_validate(obj) - _obj = StrokeParam.parse_obj({ - "color": ColorParam.from_dict(obj.get("color")) if obj.get("color") is not None else None, - "width": NumberParam.from_dict(obj.get("width")) if obj.get("width") is not None else None + _obj = cls.model_validate({ + "color": ColorParam.from_dict(obj["color"]) if obj.get("color") is not None else None, + "width": NumberParam.from_dict(obj["width"]) if obj.get("width") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/suggest_meta_data.py b/python/geoengine_openapi_client/models/suggest_meta_data.py index 330d7b3b..e5075b7c 100644 --- a/python/geoengine_openapi_client/models/suggest_meta_data.py +++ b/python/geoengine_openapi_client/models/suggest_meta_data.py @@ -18,72 +18,88 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.data_path import DataPath +from typing import Optional, Set +from typing_extensions import Self class SuggestMetaData(BaseModel): """ SuggestMetaData - """ - data_path: DataPath = Field(..., alias="dataPath") - layer_name: Optional[StrictStr] = Field(None, alias="layerName") - main_file: Optional[StrictStr] = Field(None, alias="mainFile") - __properties = ["dataPath", "layerName", "mainFile"] + """ # noqa: E501 + data_path: DataPath = Field(alias="dataPath") + layer_name: Optional[StrictStr] = Field(default=None, alias="layerName") + main_file: Optional[StrictStr] = Field(default=None, alias="mainFile") + __properties: ClassVar[List[str]] = ["dataPath", "layerName", "mainFile"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> SuggestMetaData: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of SuggestMetaData from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of data_path if self.data_path: _dict['dataPath'] = self.data_path.to_dict() # set to None if layer_name (nullable) is None - # and __fields_set__ contains the field - if self.layer_name is None and "layer_name" in self.__fields_set__: + # and model_fields_set contains the field + if self.layer_name is None and "layer_name" in self.model_fields_set: _dict['layerName'] = None # set to None if main_file (nullable) is None - # and __fields_set__ contains the field - if self.main_file is None and "main_file" in self.__fields_set__: + # and model_fields_set contains the field + if self.main_file is None and "main_file" in self.model_fields_set: _dict['mainFile'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> SuggestMetaData: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of SuggestMetaData from a dict""" if obj is None: return None if not isinstance(obj, dict): - return SuggestMetaData.parse_obj(obj) + return cls.model_validate(obj) - _obj = SuggestMetaData.parse_obj({ - "data_path": DataPath.from_dict(obj.get("dataPath")) if obj.get("dataPath") is not None else None, - "layer_name": obj.get("layerName"), - "main_file": obj.get("mainFile") + _obj = cls.model_validate({ + "dataPath": DataPath.from_dict(obj["dataPath"]) if obj.get("dataPath") is not None else None, + "layerName": obj.get("layerName"), + "mainFile": obj.get("mainFile") }) return _obj diff --git a/python/geoengine_openapi_client/models/symbology.py b/python/geoengine_openapi_client/models/symbology.py index 3aa9cda5..2099ae20 100644 --- a/python/geoengine_openapi_client/models/symbology.py +++ b/python/geoengine_openapi_client/models/symbology.py @@ -14,19 +14,17 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.line_symbology import LineSymbology from geoengine_openapi_client.models.point_symbology import PointSymbology from geoengine_openapi_client.models.polygon_symbology import PolygonSymbology from geoengine_openapi_client.models.raster_symbology import RasterSymbology -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self SYMBOLOGY_ONE_OF_SCHEMAS = ["LineSymbology", "PointSymbology", "PolygonSymbology", "RasterSymbology"] @@ -42,16 +40,16 @@ class Symbology(BaseModel): oneof_schema_3_validator: Optional[LineSymbology] = None # data type: PolygonSymbology oneof_schema_4_validator: Optional[PolygonSymbology] = None - if TYPE_CHECKING: - actual_instance: Union[LineSymbology, PointSymbology, PolygonSymbology, RasterSymbology] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(SYMBOLOGY_ONE_OF_SCHEMAS, const=True) + actual_instance: 
Optional[Union[LineSymbology, PointSymbology, PolygonSymbology, RasterSymbology]] = None + one_of_schemas: Set[str] = { "LineSymbology", "PointSymbology", "PolygonSymbology", "RasterSymbology" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -64,9 +62,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = Symbology.construct() + instance = Symbology.model_construct() error_messages = [] match = 0 # validate data type: RasterSymbology @@ -99,13 +97,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> Symbology: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> Symbology: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = Symbology.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -115,42 +113,42 @@ def from_json(cls, json_str: str) -> Symbology: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `LineSymbology` - if _data_type == "LineSymbology": + if _data_type == "line": instance.actual_instance = LineSymbology.from_json(json_str) return instance # check if data type is `PointSymbology` - if _data_type == "PointSymbology": + if _data_type == "point": instance.actual_instance = PointSymbology.from_json(json_str) return instance # check if data type is `PolygonSymbology` - if _data_type == "PolygonSymbology": + if _data_type == "polygon": instance.actual_instance = PolygonSymbology.from_json(json_str) return instance # check if data type is `RasterSymbology` - if _data_type == "RasterSymbology": + if _data_type == "raster": instance.actual_instance = RasterSymbology.from_json(json_str) return instance # check if data type is `LineSymbology` - if _data_type == "line": + if _data_type == "LineSymbology": instance.actual_instance = LineSymbology.from_json(json_str) return instance # check if data type is `PointSymbology` - if _data_type == "point": + if _data_type == "PointSymbology": instance.actual_instance = PointSymbology.from_json(json_str) return instance # check if data type is `PolygonSymbology` - if _data_type == "polygon": + if _data_type == "PolygonSymbology": instance.actual_instance = PolygonSymbology.from_json(json_str) return instance # check if data type is `RasterSymbology` - if _data_type == "raster": + if _data_type == "RasterSymbology": instance.actual_instance = RasterSymbology.from_json(json_str) return instance @@ -193,19 +191,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], LineSymbology, PointSymbology, PolygonSymbology, RasterSymbology]]: """Returns the dict representation of the actual instance""" if self.actual_instance 
is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -213,6 +209,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/task_abort_options.py b/python/geoengine_openapi_client/models/task_abort_options.py index 63a37e04..894f2e32 100644 --- a/python/geoengine_openapi_client/models/task_abort_options.py +++ b/python/geoengine_openapi_client/models/task_abort_options.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, StrictBool +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TaskAbortOptions(BaseModel): """ TaskAbortOptions - """ + """ # noqa: E501 force: Optional[StrictBool] = None - __properties = ["force"] + __properties: ClassVar[List[str]] = ["force"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskAbortOptions: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskAbortOptions from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
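As an illustration of the serialization rule described in this docstring (an editor's sketch, not part of the generated patch; the constructor arguments below are made-up values):

    from geoengine_openapi_client.models.task_abort_options import TaskAbortOptions

    # `force` is an unset Optional field here, so exclude_none drops it entirely
    TaskAbortOptions().to_dict()            # -> {}
    # once a value is set, it is emitted under its key
    TaskAbortOptions(force=True).to_dict()  # -> {'force': True}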
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskAbortOptions: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskAbortOptions from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskAbortOptions.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskAbortOptions.parse_obj({ + _obj = cls.model_validate({ "force": obj.get("force") }) return _obj diff --git a/python/geoengine_openapi_client/models/task_filter.py b/python/geoengine_openapi_client/models/task_filter.py index 37793161..0f43c3a5 100644 --- a/python/geoengine_openapi_client/models/task_filter.py +++ b/python/geoengine_openapi_client/models/task_filter.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class TaskFilter(str, Enum): @@ -36,8 +33,8 @@ class TaskFilter(str, Enum): COMPLETED = 'completed' @classmethod - def from_json(cls, json_str: str) -> TaskFilter: + def from_json(cls, json_str: str) -> Self: """Create an instance of TaskFilter from a JSON string""" - return TaskFilter(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/task_list_options.py b/python/geoengine_openapi_client/models/task_list_options.py index d78b15be..050ba8c4 100644 --- a/python/geoengine_openapi_client/models/task_list_options.py +++ b/python/geoengine_openapi_client/models/task_list_options.py @@ -18,61 +18,78 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, conint +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.task_filter import TaskFilter +from typing import Optional, Set +from typing_extensions import Self class TaskListOptions(BaseModel): """ TaskListOptions - """ + """ # noqa: E501 filter: Optional[TaskFilter] = None - limit: Optional[conint(strict=True, ge=0)] = None - offset: Optional[conint(strict=True, ge=0)] = None - __properties = ["filter", "limit", "offset"] + limit: Optional[Annotated[int, Field(strict=True, ge=0)]] = None + offset: Optional[Annotated[int, Field(strict=True, ge=0)]] = None + __properties: ClassVar[List[str]] = ["filter", "limit", "offset"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskListOptions: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskListOptions from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the 
model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if filter (nullable) is None - # and __fields_set__ contains the field - if self.filter is None and "filter" in self.__fields_set__: + # and model_fields_set contains the field + if self.filter is None and "filter" in self.model_fields_set: _dict['filter'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskListOptions: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskListOptions from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskListOptions.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskListOptions.parse_obj({ + _obj = cls.model_validate({ "filter": obj.get("filter"), "limit": obj.get("limit"), "offset": obj.get("offset") diff --git a/python/geoengine_openapi_client/models/task_response.py b/python/geoengine_openapi_client/models/task_response.py index ecae47a5..ed116e54 100644 --- a/python/geoengine_openapi_client/models/task_response.py +++ b/python/geoengine_openapi_client/models/task_response.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class TaskResponse(BaseModel): """ - Create a task somewhere and respond with a task id to query the task status. # noqa: E501 - """ - task_id: StrictStr = Field(..., alias="taskId") - __properties = ["taskId"] + Create a task somewhere and respond with a task id to query the task status. + """ # noqa: E501 + task_id: StrictStr = Field(alias="taskId") + __properties: ClassVar[List[str]] = ["taskId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskResponse.parse_obj({ - "task_id": obj.get("taskId") + _obj = cls.model_validate({ + "taskId": obj.get("taskId") }) return _obj diff --git a/python/geoengine_openapi_client/models/task_status.py b/python/geoengine_openapi_client/models/task_status.py index 9103730f..3f6dddbc 100644 --- a/python/geoengine_openapi_client/models/task_status.py +++ b/python/geoengine_openapi_client/models/task_status.py @@ -14,19 +14,17 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.task_status_aborted import TaskStatusAborted from geoengine_openapi_client.models.task_status_completed import TaskStatusCompleted from geoengine_openapi_client.models.task_status_failed import TaskStatusFailed from geoengine_openapi_client.models.task_status_running import TaskStatusRunning -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self TASKSTATUS_ONE_OF_SCHEMAS = ["TaskStatusAborted", "TaskStatusCompleted", "TaskStatusFailed", "TaskStatusRunning"] @@ -42,16 +40,16 @@ class TaskStatus(BaseModel): oneof_schema_3_validator: Optional[TaskStatusAborted] = None # data type: TaskStatusFailed oneof_schema_4_validator: Optional[TaskStatusFailed] = None - if TYPE_CHECKING: - actual_instance: Union[TaskStatusAborted, TaskStatusCompleted, TaskStatusFailed, TaskStatusRunning] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(TASKSTATUS_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[TaskStatusAborted, TaskStatusCompleted, TaskStatusFailed, TaskStatusRunning]] = None + one_of_schemas: Set[str] = { "TaskStatusAborted", "TaskStatusCompleted", "TaskStatusFailed", "TaskStatusRunning" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { 'TaskStatusWithId': 'TaskStatusWithId' } @@ -65,9 +63,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = TaskStatus.construct() + instance = TaskStatus.model_construct() error_messages = [] match = 0 # validate data type: TaskStatusRunning @@ -100,13 +98,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> 
TaskStatus: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> TaskStatus: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = TaskStatus.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -116,50 +114,50 @@ def from_json(cls, json_str: str) -> TaskStatus: raise ValueError("Failed to lookup data type from the field `status` in the input.") # check if data type is `TaskStatusAborted` - if _data_type == "TaskStatusAborted": + if _data_type == "aborted": instance.actual_instance = TaskStatusAborted.from_json(json_str) return instance # check if data type is `TaskStatusCompleted` - if _data_type == "TaskStatusCompleted": + if _data_type == "completed": instance.actual_instance = TaskStatusCompleted.from_json(json_str) return instance # check if data type is `TaskStatusFailed` - if _data_type == "TaskStatusFailed": + if _data_type == "failed": instance.actual_instance = TaskStatusFailed.from_json(json_str) return instance # check if data type is `TaskStatusRunning` - if _data_type == "TaskStatusRunning": + if _data_type == "running": instance.actual_instance = TaskStatusRunning.from_json(json_str) return instance - # check if data type is `TaskStatusWithId` - if _data_type == "TaskStatusWithId": - instance.actual_instance = TaskStatusWithId.from_json(json_str) - return instance - # check if data type is `TaskStatusAborted` - if _data_type == "aborted": + if _data_type == "TaskStatusAborted": instance.actual_instance = TaskStatusAborted.from_json(json_str) return instance # check if data type is `TaskStatusCompleted` - if _data_type == "completed": + if _data_type == "TaskStatusCompleted": instance.actual_instance = TaskStatusCompleted.from_json(json_str) return instance # check if data type is `TaskStatusFailed` - if _data_type == "failed": + if _data_type == "TaskStatusFailed": instance.actual_instance = TaskStatusFailed.from_json(json_str) return instance # check if data type is `TaskStatusRunning` - if _data_type == "running": + if _data_type == "TaskStatusRunning": instance.actual_instance = TaskStatusRunning.from_json(json_str) return instance + # check if data type is `TaskStatusWithId` + if _data_type == "TaskStatusWithId": + instance.actual_instance = TaskStatusWithId.from_json(json_str) + return instance + # deserialize data into TaskStatusRunning try: instance.actual_instance = TaskStatusRunning.from_json(json_str) @@ -199,19 +197,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], TaskStatusAborted, TaskStatusCompleted, TaskStatusFailed, TaskStatusRunning]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -219,6 +215,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return 
pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/task_status_aborted.py b/python/geoengine_openapi_client/models/task_status_aborted.py index 0e9d29ee..956c6ef2 100644 --- a/python/geoengine_openapi_client/models/task_status_aborted.py +++ b/python/geoengine_openapi_client/models/task_status_aborted.py @@ -18,67 +18,83 @@ import re # noqa: F401 import json - -from typing import Any, Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TaskStatusAborted(BaseModel): """ TaskStatusAborted - """ - clean_up: Optional[Any] = Field(..., alias="cleanUp") - status: StrictStr = Field(...) - __properties = ["cleanUp", "status"] + """ # noqa: E501 + clean_up: Optional[Any] = Field(alias="cleanUp") + status: StrictStr + __properties: ClassVar[List[str]] = ["cleanUp", "status"] - @validator('status') + @field_validator('status') def status_validate_enum(cls, value): """Validates the enum""" - if value not in ('aborted'): + if value not in set(['aborted']): raise ValueError("must be one of enum values ('aborted')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskStatusAborted: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskStatusAborted from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
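To see the nullable-field rule above in action with the regenerated TaskStatusAborted (an illustrative sketch only; the field values are hypothetical):

    from geoengine_openapi_client.models.task_status_aborted import TaskStatusAborted

    aborted = TaskStatusAborted(cleanUp=None, status='aborted')
    # `cleanUp` was set explicitly at init, so it is re-added as None after exclude_none
    aborted.to_dict()   # -> {'status': 'aborted', 'cleanUp': None}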
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if clean_up (nullable) is None - # and __fields_set__ contains the field - if self.clean_up is None and "clean_up" in self.__fields_set__: + # and model_fields_set contains the field + if self.clean_up is None and "clean_up" in self.model_fields_set: _dict['cleanUp'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskStatusAborted: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskStatusAborted from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskStatusAborted.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskStatusAborted.parse_obj({ - "clean_up": obj.get("cleanUp"), + _obj = cls.model_validate({ + "cleanUp": obj.get("cleanUp"), "status": obj.get("status") }) return _obj diff --git a/python/geoengine_openapi_client/models/task_status_completed.py b/python/geoengine_openapi_client/models/task_status_completed.py index a859dab7..3283b8cd 100644 --- a/python/geoengine_openapi_client/models/task_status_completed.py +++ b/python/geoengine_openapi_client/models/task_status_completed.py @@ -18,76 +18,92 @@ import re # noqa: F401 import json - -from typing import Any, Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TaskStatusCompleted(BaseModel): """ TaskStatusCompleted - """ + """ # noqa: E501 description: Optional[StrictStr] = None info: Optional[Any] = None - status: StrictStr = Field(...) 
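The aborted/completed/failed/running variants in this part of the patch are what the reordered TaskStatus.from_json dispatch (in task_status.py above) now resolves directly from the lowercase `status` discriminator. A minimal sketch, with a hypothetical payload:

    from geoengine_openapi_client.models.task_status import TaskStatus

    ts = TaskStatus.from_json('{"status": "aborted", "cleanUp": null}')
    type(ts.actual_instance).__name__   # -> 'TaskStatusAborted'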
- task_type: StrictStr = Field(..., alias="taskType") - time_started: StrictStr = Field(..., alias="timeStarted") - time_total: StrictStr = Field(..., alias="timeTotal") - __properties = ["description", "info", "status", "taskType", "timeStarted", "timeTotal"] + status: StrictStr + task_type: StrictStr = Field(alias="taskType") + time_started: StrictStr = Field(alias="timeStarted") + time_total: StrictStr = Field(alias="timeTotal") + __properties: ClassVar[List[str]] = ["description", "info", "status", "taskType", "timeStarted", "timeTotal"] - @validator('status') + @field_validator('status') def status_validate_enum(cls, value): """Validates the enum""" - if value not in ('completed'): + if value not in set(['completed']): raise ValueError("must be one of enum values ('completed')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskStatusCompleted: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskStatusCompleted from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if info (nullable) is None - # and __fields_set__ contains the field - if self.info is None and "info" in self.__fields_set__: + # and model_fields_set contains the field + if self.info is None and "info" in self.model_fields_set: _dict['info'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskStatusCompleted: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskStatusCompleted from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskStatusCompleted.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskStatusCompleted.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "info": obj.get("info"), "status": obj.get("status"), - "task_type": obj.get("taskType"), - "time_started": obj.get("timeStarted"), - "time_total": obj.get("timeTotal") + "taskType": obj.get("taskType"), + "timeStarted": obj.get("timeStarted"), + "timeTotal": obj.get("timeTotal") }) return _obj diff --git a/python/geoengine_openapi_client/models/task_status_failed.py b/python/geoengine_openapi_client/models/task_status_failed.py index 6c664cc7..13cc7470 100644 --- a/python/geoengine_openapi_client/models/task_status_failed.py +++ b/python/geoengine_openapi_client/models/task_status_failed.py @@ -18,73 +18,89 @@ import re # noqa: F401 import json - -from typing import Any, Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TaskStatusFailed(BaseModel): """ TaskStatusFailed - """ - clean_up: Optional[Any] = Field(..., alias="cleanUp") - error: Optional[Any] = Field(...) - status: StrictStr = Field(...) 
- __properties = ["cleanUp", "error", "status"] + """ # noqa: E501 + clean_up: Optional[Any] = Field(alias="cleanUp") + error: Optional[Any] + status: StrictStr + __properties: ClassVar[List[str]] = ["cleanUp", "error", "status"] - @validator('status') + @field_validator('status') def status_validate_enum(cls, value): """Validates the enum""" - if value not in ('failed'): + if value not in set(['failed']): raise ValueError("must be one of enum values ('failed')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskStatusFailed: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskStatusFailed from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if clean_up (nullable) is None - # and __fields_set__ contains the field - if self.clean_up is None and "clean_up" in self.__fields_set__: + # and model_fields_set contains the field + if self.clean_up is None and "clean_up" in self.model_fields_set: _dict['cleanUp'] = None # set to None if error (nullable) is None - # and __fields_set__ contains the field - if self.error is None and "error" in self.__fields_set__: + # and model_fields_set contains the field + if self.error is None and "error" in self.model_fields_set: _dict['error'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskStatusFailed: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskStatusFailed from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskStatusFailed.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskStatusFailed.parse_obj({ - "clean_up": obj.get("cleanUp"), + _obj = cls.model_validate({ + "cleanUp": obj.get("cleanUp"), "error": obj.get("error"), "status": obj.get("status") }) diff --git a/python/geoengine_openapi_client/models/task_status_running.py b/python/geoengine_openapi_client/models/task_status_running.py index c3474069..98cdf6c0 100644 --- a/python/geoengine_openapi_client/models/task_status_running.py +++ b/python/geoengine_openapi_client/models/task_status_running.py @@ -18,78 +18,94 @@ import re # noqa: F401 import json - -from typing import Any, Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TaskStatusRunning(BaseModel): """ TaskStatusRunning - """ + """ # noqa: E501 description: Optional[StrictStr] = None - estimated_time_remaining: StrictStr = Field(..., alias="estimatedTimeRemaining") + estimated_time_remaining: StrictStr = Field(alias="estimatedTimeRemaining") info: Optional[Any] = None - pct_complete: StrictStr = Field(..., alias="pctComplete") - status: StrictStr = Field(...) 
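Each status variant converts its pydantic-v1 `@validator` into a v2 `@field_validator`, as shown for the aborted/completed/failed models above and for the running model below. A standalone sketch of the pattern (the `ExampleStatus` model is hypothetical, not part of the client):

    from pydantic import BaseModel, StrictStr, field_validator

    class ExampleStatus(BaseModel):
        status: StrictStr

        @field_validator('status')
        def status_validate_enum(cls, value):
            # same shape as the generated validators: reject anything but the literal value
            if value not in set(['running']):
                raise ValueError("must be one of enum values ('running')")
            return value

    ExampleStatus(status='running')   # ok
    # ExampleStatus(status='done')    # raises pydantic.ValidationError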
- task_type: StrictStr = Field(..., alias="taskType") - time_started: StrictStr = Field(..., alias="timeStarted") - __properties = ["description", "estimatedTimeRemaining", "info", "pctComplete", "status", "taskType", "timeStarted"] + pct_complete: StrictStr = Field(alias="pctComplete") + status: StrictStr + task_type: StrictStr = Field(alias="taskType") + time_started: StrictStr = Field(alias="timeStarted") + __properties: ClassVar[List[str]] = ["description", "estimatedTimeRemaining", "info", "pctComplete", "status", "taskType", "timeStarted"] - @validator('status') + @field_validator('status') def status_validate_enum(cls, value): """Validates the enum""" - if value not in ('running'): + if value not in set(['running']): raise ValueError("must be one of enum values ('running')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskStatusRunning: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskStatusRunning from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if info (nullable) is None - # and __fields_set__ contains the field - if self.info is None and "info" in self.__fields_set__: + # and model_fields_set contains the field + if self.info is None and "info" in self.model_fields_set: _dict['info'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskStatusRunning: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskStatusRunning from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskStatusRunning.parse_obj(obj) + return cls.model_validate(obj) - _obj = TaskStatusRunning.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "estimated_time_remaining": obj.get("estimatedTimeRemaining"), + "estimatedTimeRemaining": obj.get("estimatedTimeRemaining"), "info": obj.get("info"), - "pct_complete": obj.get("pctComplete"), + "pctComplete": obj.get("pctComplete"), "status": obj.get("status"), - "task_type": obj.get("taskType"), - "time_started": obj.get("timeStarted") + "taskType": obj.get("taskType"), + "timeStarted": obj.get("timeStarted") }) return _obj diff --git a/python/geoengine_openapi_client/models/task_status_with_id.py b/python/geoengine_openapi_client/models/task_status_with_id.py index 2ec27f20..db33da7e 100644 --- a/python/geoengine_openapi_client/models/task_status_with_id.py +++ b/python/geoengine_openapi_client/models/task_status_with_id.py @@ -18,56 +18,72 @@ import re # noqa: F401 import json - - -from pydantic import Field, StrictStr +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.task_status import TaskStatus +from typing import Optional, Set +from typing_extensions import Self class TaskStatusWithId(TaskStatus): """ TaskStatusWithId - """ - task_id: StrictStr = Field(..., alias="taskId") - __properties = ["description", "estimatedTimeRemaining", "info", "pctComplete", "status", "taskType", "timeStarted", "timeTotal", "cleanUp", "error", "taskId"] + """ # noqa: E501 + task_id: StrictStr = Field(alias="taskId") + __properties: ClassVar[List[str]] = ["description", "estimatedTimeRemaining", "info", "pctComplete", "status", "taskType", "timeStarted", "timeTotal", "cleanUp", "error", "taskId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TaskStatusWithId: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TaskStatusWithId from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary 
representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if info (nullable) is None - # and __fields_set__ contains the field + # and model_fields_set contains the field # Note: fixed handling of actual_instance if getattr(self.actual_instance, "info", None) is None and "info" in self.actual_instance.__fields_set__: _dict['info'] = None # set to None if clean_up (nullable) is None - # and __fields_set__ contains the field + # and model_fields_set contains the field # Note: fixed handling of actual_instance if getattr(self.actual_instance, "clean_up", None) is None and "clean_up" in self.actual_instance.__fields_set__: _dict['cleanUp'] = None # set to None if error (nullable) is None - # and __fields_set__ contains the field + # and model_fields_set contains the field # Note: fixed handling of actual_instance if getattr(self.actual_instance, "error", None) is None and "error" in self.actual_instance.__fields_set__: _dict['error'] = None @@ -75,33 +91,26 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> TaskStatusWithId: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TaskStatusWithId from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TaskStatusWithId.parse_obj(obj) - - # Note: fixed handling of actual_instance - _obj = TaskStatusWithId.parse_obj({ - "actual_instance": TaskStatus.from_dict(obj).actual_instance, - "task_id": obj.get("taskId") - }) - return _obj + return cls.model_validate(obj) - _obj = TaskStatusWithId.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), - "estimated_time_remaining": obj.get("estimatedTimeRemaining"), + "estimatedTimeRemaining": obj.get("estimatedTimeRemaining"), "info": obj.get("info"), - "pct_complete": obj.get("pctComplete"), + "pctComplete": obj.get("pctComplete"), "status": obj.get("status"), - "task_type": obj.get("taskType"), - "time_started": obj.get("timeStarted"), - "time_total": obj.get("timeTotal"), - "clean_up": obj.get("cleanUp"), + "taskType": obj.get("taskType"), + "timeStarted": obj.get("timeStarted"), + "timeTotal": obj.get("timeTotal"), + "cleanUp": obj.get("cleanUp"), "error": obj.get("error"), - "task_id": obj.get("taskId") + "taskId": obj.get("taskId") }) return _obj diff --git a/python/geoengine_openapi_client/models/text_symbology.py b/python/geoengine_openapi_client/models/text_symbology.py index 3d452148..191f54d6 100644 --- a/python/geoengine_openapi_client/models/text_symbology.py +++ b/python/geoengine_openapi_client/models/text_symbology.py @@ -18,45 +18,61 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.color_param import ColorParam from geoengine_openapi_client.models.stroke_param import StrokeParam +from typing import Optional, Set +from typing_extensions import Self class TextSymbology(BaseModel): """ TextSymbology - """ - attribute: StrictStr = Field(...) 
- fill_color: ColorParam = Field(..., alias="fillColor") - stroke: StrokeParam = Field(...) - __properties = ["attribute", "fillColor", "stroke"] + """ # noqa: E501 + attribute: StrictStr + fill_color: ColorParam = Field(alias="fillColor") + stroke: StrokeParam + __properties: ClassVar[List[str]] = ["attribute", "fillColor", "stroke"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TextSymbology: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TextSymbology from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of fill_color if self.fill_color: _dict['fillColor'] = self.fill_color.to_dict() @@ -66,18 +82,18 @@ def to_dict(self): return _dict @classmethod - def from_dict(cls, obj: dict) -> TextSymbology: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TextSymbology from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TextSymbology.parse_obj(obj) + return cls.model_validate(obj) - _obj = TextSymbology.parse_obj({ + _obj = cls.model_validate({ "attribute": obj.get("attribute"), - "fill_color": ColorParam.from_dict(obj.get("fillColor")) if obj.get("fillColor") is not None else None, - "stroke": StrokeParam.from_dict(obj.get("stroke")) if obj.get("stroke") is not None else None + "fillColor": ColorParam.from_dict(obj["fillColor"]) if obj.get("fillColor") is not None else None, + "stroke": StrokeParam.from_dict(obj["stroke"]) if obj.get("stroke") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/time_granularity.py b/python/geoengine_openapi_client/models/time_granularity.py index e879ed26..28b357fe 100644 --- a/python/geoengine_openapi_client/models/time_granularity.py +++ b/python/geoengine_openapi_client/models/time_granularity.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class TimeGranularity(str, Enum): @@ -39,8 +36,8 @@ class TimeGranularity(str, Enum): YEARS = 'years' @classmethod - def from_json(cls, 
json_str: str) -> TimeGranularity: + def from_json(cls, json_str: str) -> Self: """Create an instance of TimeGranularity from a JSON string""" - return TimeGranularity(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/time_interval.py b/python/geoengine_openapi_client/models/time_interval.py index ac17f48a..aae8409a 100644 --- a/python/geoengine_openapi_client/models/time_interval.py +++ b/python/geoengine_openapi_client/models/time_interval.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictInt +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class TimeInterval(BaseModel): """ - Stores time intervals in ms in close-open semantic [start, end) # noqa: E501 - """ - end: StrictInt = Field(...) - start: StrictInt = Field(...) - __properties = ["end", "start"] + Stores time intervals in ms in close-open semantic [start, end) + """ # noqa: E501 + end: StrictInt + start: StrictInt + __properties: ClassVar[List[str]] = ["end", "start"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TimeInterval: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TimeInterval from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
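For a model without nullable fields, such as TimeInterval, to_dict/to_json and from_json are therefore straightforward inverses (an editor's sketch; the millisecond values are arbitrary):

    from geoengine_openapi_client.models.time_interval import TimeInterval

    # closed-open interval [0, 1000) in milliseconds
    interval = TimeInterval(start=0, end=1000)
    interval.to_dict()                                     # -> {'end': 1000, 'start': 0}
    TimeInterval.from_json(interval.to_json()).to_dict()   # round-trips to the same dict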
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> TimeInterval: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TimeInterval from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TimeInterval.parse_obj(obj) + return cls.model_validate(obj) - _obj = TimeInterval.parse_obj({ + _obj = cls.model_validate({ "end": obj.get("end"), "start": obj.get("start") }) diff --git a/python/geoengine_openapi_client/models/time_reference.py b/python/geoengine_openapi_client/models/time_reference.py index 3c2c1fb0..4bcc47ca 100644 --- a/python/geoengine_openapi_client/models/time_reference.py +++ b/python/geoengine_openapi_client/models/time_reference.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class TimeReference(str, Enum): @@ -34,8 +31,8 @@ class TimeReference(str, Enum): END = 'end' @classmethod - def from_json(cls, json_str: str) -> TimeReference: + def from_json(cls, json_str: str) -> Self: """Create an instance of TimeReference from a JSON string""" - return TimeReference(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/time_step.py b/python/geoengine_openapi_client/models/time_step.py index 1e7dc352..6de7a1c4 100644 --- a/python/geoengine_openapi_client/models/time_step.py +++ b/python/geoengine_openapi_client/models/time_step.py @@ -18,55 +18,72 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, conint +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated from geoengine_openapi_client.models.time_granularity import TimeGranularity +from typing import Optional, Set +from typing_extensions import Self class TimeStep(BaseModel): """ TimeStep - """ - granularity: TimeGranularity = Field(...) - step: conint(strict=True, ge=0) = Field(...) 
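The `step` constraint removed here is replaced a few lines below by the v2 `Annotated[int, Field(strict=True, ge=0)]` form used throughout this patch in place of `conint`. A sketch of how the constraint behaves after the migration (the argument values are arbitrary):

    from geoengine_openapi_client.models.time_granularity import TimeGranularity
    from geoengine_openapi_client.models.time_step import TimeStep

    TimeStep(granularity=TimeGranularity.YEARS, step=1)      # ok
    # TimeStep(granularity=TimeGranularity.YEARS, step=-1)   # raises pydantic.ValidationError (ge=0)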
- __properties = ["granularity", "step"] + """ # noqa: E501 + granularity: TimeGranularity + step: Annotated[int, Field(strict=True, ge=0)] + __properties: ClassVar[List[str]] = ["granularity", "step"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TimeStep: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TimeStep from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> TimeStep: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TimeStep from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TimeStep.parse_obj(obj) + return cls.model_validate(obj) - _obj = TimeStep.parse_obj({ + _obj = cls.model_validate({ "granularity": obj.get("granularity"), "step": obj.get("step") }) diff --git a/python/geoengine_openapi_client/models/typed_geometry.py b/python/geoengine_openapi_client/models/typed_geometry.py index 6028b1f5..05c64aae 100644 --- a/python/geoengine_openapi_client/models/typed_geometry.py +++ b/python/geoengine_openapi_client/models/typed_geometry.py @@ -14,19 +14,17 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.typed_geometry_one_of import TypedGeometryOneOf from geoengine_openapi_client.models.typed_geometry_one_of1 import TypedGeometryOneOf1 from geoengine_openapi_client.models.typed_geometry_one_of2 import TypedGeometryOneOf2 from geoengine_openapi_client.models.typed_geometry_one_of3 import TypedGeometryOneOf3 -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self TYPEDGEOMETRY_ONE_OF_SCHEMAS = ["TypedGeometryOneOf", "TypedGeometryOneOf1", "TypedGeometryOneOf2", "TypedGeometryOneOf3"] @@ -42,14 +40,14 @@ class TypedGeometry(BaseModel): oneof_schema_3_validator: 
Optional[TypedGeometryOneOf2] = None # data type: TypedGeometryOneOf3 oneof_schema_4_validator: Optional[TypedGeometryOneOf3] = None - if TYPE_CHECKING: - actual_instance: Union[TypedGeometryOneOf, TypedGeometryOneOf1, TypedGeometryOneOf2, TypedGeometryOneOf3] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(TYPEDGEOMETRY_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[TypedGeometryOneOf, TypedGeometryOneOf1, TypedGeometryOneOf2, TypedGeometryOneOf3]] = None + one_of_schemas: Set[str] = { "TypedGeometryOneOf", "TypedGeometryOneOf1", "TypedGeometryOneOf2", "TypedGeometryOneOf3" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True def __init__(self, *args, **kwargs) -> None: if args: @@ -61,9 +59,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = TypedGeometry.construct() + instance = TypedGeometry.model_construct() error_messages = [] match = 0 # validate data type: TypedGeometryOneOf @@ -96,13 +94,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> TypedGeometry: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> TypedGeometry: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = TypedGeometry.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -145,19 +143,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], TypedGeometryOneOf, TypedGeometryOneOf1, TypedGeometryOneOf2, TypedGeometryOneOf3]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -165,6 +161,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/typed_geometry_one_of.py b/python/geoengine_openapi_client/models/typed_geometry_one_of.py index e89d66d1..d1108fc6 100644 --- a/python/geoengine_openapi_client/models/typed_geometry_one_of.py +++ b/python/geoengine_openapi_client/models/typed_geometry_one_of.py @@ -18,59 +18,75 @@ import re # noqa: F401 import json - -from typing import Any, Optional -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TypedGeometryOneOf(BaseModel): """ TypedGeometryOneOf - """ - data: Optional[Any] = Field(..., alias="Data") - __properties = ["Data"] + """ # noqa: E501 + 
data: Optional[Any] = Field(alias="Data") + __properties: ClassVar[List[str]] = ["Data"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedGeometryOneOf: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedGeometryOneOf from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if data (nullable) is None - # and __fields_set__ contains the field - if self.data is None and "data" in self.__fields_set__: + # and model_fields_set contains the field + if self.data is None and "data" in self.model_fields_set: _dict['Data'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedGeometryOneOf: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedGeometryOneOf from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedGeometryOneOf.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedGeometryOneOf.parse_obj({ - "data": obj.get("Data") + _obj = cls.model_validate({ + "Data": obj.get("Data") }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_geometry_one_of1.py b/python/geoengine_openapi_client/models/typed_geometry_one_of1.py index 0d41098b..3dbe55ae 100644 --- a/python/geoengine_openapi_client/models/typed_geometry_one_of1.py +++ b/python/geoengine_openapi_client/models/typed_geometry_one_of1.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.multi_point import MultiPoint +from typing import Optional, Set +from typing_extensions import Self class TypedGeometryOneOf1(BaseModel): """ TypedGeometryOneOf1 - """ - multi_point: MultiPoint = Field(..., alias="MultiPoint") - __properties = ["MultiPoint"] + """ # noqa: E501 + multi_point: MultiPoint = Field(alias="MultiPoint") + __properties: ClassVar[List[str]] = ["MultiPoint"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def 
to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedGeometryOneOf1: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedGeometryOneOf1 from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of multi_point if self.multi_point: _dict['MultiPoint'] = self.multi_point.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedGeometryOneOf1: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedGeometryOneOf1 from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedGeometryOneOf1.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedGeometryOneOf1.parse_obj({ - "multi_point": MultiPoint.from_dict(obj.get("MultiPoint")) if obj.get("MultiPoint") is not None else None + _obj = cls.model_validate({ + "MultiPoint": MultiPoint.from_dict(obj["MultiPoint"]) if obj.get("MultiPoint") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_geometry_one_of2.py b/python/geoengine_openapi_client/models/typed_geometry_one_of2.py index 3abd3c4f..ef865f71 100644 --- a/python/geoengine_openapi_client/models/typed_geometry_one_of2.py +++ b/python/geoengine_openapi_client/models/typed_geometry_one_of2.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.multi_line_string import MultiLineString +from typing import Optional, Set +from typing_extensions import Self class TypedGeometryOneOf2(BaseModel): """ TypedGeometryOneOf2 - """ - multi_line_string: MultiLineString = Field(..., alias="MultiLineString") - __properties = ["MultiLineString"] + """ # noqa: E501 + multi_line_string: MultiLineString = Field(alias="MultiLineString") + __properties: ClassVar[List[str]] = ["MultiLineString"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: 
"""Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedGeometryOneOf2: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedGeometryOneOf2 from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of multi_line_string if self.multi_line_string: _dict['MultiLineString'] = self.multi_line_string.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedGeometryOneOf2: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedGeometryOneOf2 from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedGeometryOneOf2.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedGeometryOneOf2.parse_obj({ - "multi_line_string": MultiLineString.from_dict(obj.get("MultiLineString")) if obj.get("MultiLineString") is not None else None + _obj = cls.model_validate({ + "MultiLineString": MultiLineString.from_dict(obj["MultiLineString"]) if obj.get("MultiLineString") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_geometry_one_of3.py b/python/geoengine_openapi_client/models/typed_geometry_one_of3.py index abd97bf2..7a48fcc0 100644 --- a/python/geoengine_openapi_client/models/typed_geometry_one_of3.py +++ b/python/geoengine_openapi_client/models/typed_geometry_one_of3.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.multi_polygon import MultiPolygon +from typing import Optional, Set +from typing_extensions import Self class TypedGeometryOneOf3(BaseModel): """ TypedGeometryOneOf3 - """ - multi_polygon: MultiPolygon = Field(..., alias="MultiPolygon") - __properties = ["MultiPolygon"] + """ # noqa: E501 + multi_polygon: MultiPolygon = Field(alias="MultiPolygon") + __properties: ClassVar[List[str]] = ["MultiPolygon"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) 
@classmethod - def from_json(cls, json_str: str) -> TypedGeometryOneOf3: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedGeometryOneOf3 from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of multi_polygon if self.multi_polygon: _dict['MultiPolygon'] = self.multi_polygon.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedGeometryOneOf3: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedGeometryOneOf3 from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedGeometryOneOf3.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedGeometryOneOf3.parse_obj({ - "multi_polygon": MultiPolygon.from_dict(obj.get("MultiPolygon")) if obj.get("MultiPolygon") is not None else None + _obj = cls.model_validate({ + "MultiPolygon": MultiPolygon.from_dict(obj["MultiPolygon"]) if obj.get("MultiPolygon") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_operator.py b/python/geoengine_openapi_client/models/typed_operator.py index a32032b2..a0c4a0cc 100644 --- a/python/geoengine_openapi_client/models/typed_operator.py +++ b/python/geoengine_openapi_client/models/typed_operator.py @@ -18,66 +18,82 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.typed_operator_operator import TypedOperatorOperator +from typing import Optional, Set +from typing_extensions import Self class TypedOperator(BaseModel): """ - An enum to differentiate between `Operator` variants # noqa: E501 - """ - operator: TypedOperatorOperator = Field(...) - type: StrictStr = Field(...) 
- __properties = ["operator", "type"] + An enum to differentiate between `Operator` variants + """ # noqa: E501 + operator: TypedOperatorOperator + type: StrictStr + __properties: ClassVar[List[str]] = ["operator", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('Vector', 'Raster', 'Plot'): + if value not in set(['Vector', 'Raster', 'Plot']): raise ValueError("must be one of enum values ('Vector', 'Raster', 'Plot')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedOperator: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedOperator from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of operator if self.operator: _dict['operator'] = self.operator.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedOperator: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedOperator from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedOperator.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedOperator.parse_obj({ - "operator": TypedOperatorOperator.from_dict(obj.get("operator")) if obj.get("operator") is not None else None, + _obj = cls.model_validate({ + "operator": TypedOperatorOperator.from_dict(obj["operator"]) if obj.get("operator") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_operator_operator.py b/python/geoengine_openapi_client/models/typed_operator_operator.py index 561df952..453c5bc2 100644 --- a/python/geoengine_openapi_client/models/typed_operator_operator.py +++ b/python/geoengine_openapi_client/models/typed_operator_operator.py @@ -18,55 +18,71 @@ import re # noqa: F401 import json - -from typing import Any, Dict, Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class TypedOperatorOperator(BaseModel): """ TypedOperatorOperator - """ + """ # noqa: E501 params: Optional[Dict[str, Any]] = None sources: Optional[Dict[str, Any]] = None - type: StrictStr = Field(...) - __properties = ["params", "sources", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["params", "sources", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedOperatorOperator: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedOperatorOperator from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedOperatorOperator: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedOperatorOperator from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedOperatorOperator.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedOperatorOperator.parse_obj({ + _obj = cls.model_validate({ "params": obj.get("params"), "sources": obj.get("sources"), "type": obj.get("type") diff --git a/python/geoengine_openapi_client/models/typed_plot_result_descriptor.py b/python/geoengine_openapi_client/models/typed_plot_result_descriptor.py index e211bf04..96759361 100644 --- a/python/geoengine_openapi_client/models/typed_plot_result_descriptor.py +++ b/python/geoengine_openapi_client/models/typed_plot_result_descriptor.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class TypedPlotResultDescriptor(BaseModel): """ - A `ResultDescriptor` for plot queries # noqa: E501 - """ + A `ResultDescriptor` for plot queries + """ # noqa: E501 bbox: Optional[BoundingBox2D] = None - spatial_reference: StrictStr = Field(..., alias="spatialReference") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - type: StrictStr = Field(...) 
- __properties = ["bbox", "spatialReference", "time", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["bbox", "spatialReference", "time", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('plot', 'raster', 'vector'): - raise ValueError("must be one of enum values ('plot', 'raster', 'vector')") + if value not in set(['plot']): + raise ValueError("must be one of enum values ('plot')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedPlotResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedPlotResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: _dict['bbox'] = self.bbox.to_dict() @@ -72,30 +88,30 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedPlotResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedPlotResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedPlotResultDescriptor.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedPlotResultDescriptor.parse_obj({ - "bbox": BoundingBox2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None, + _obj = cls.model_validate({ + "bbox": BoundingBox2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_raster_result_descriptor.py b/python/geoengine_openapi_client/models/typed_raster_result_descriptor.py index f926ba10..bd6fe2b1 100644 --- a/python/geoengine_openapi_client/models/typed_raster_result_descriptor.py +++ b/python/geoengine_openapi_client/models/typed_raster_result_descriptor.py @@ -18,65 +18,81 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor from geoengine_openapi_client.models.raster_data_type import RasterDataType from geoengine_openapi_client.models.spatial_partition2_d import SpatialPartition2D from geoengine_openapi_client.models.spatial_resolution import SpatialResolution from geoengine_openapi_client.models.time_interval import TimeInterval +from typing import Optional, Set +from typing_extensions import Self class TypedRasterResultDescriptor(BaseModel): """ - A `ResultDescriptor` for raster queries # noqa: E501 - """ - bands: conlist(RasterBandDescriptor) = Field(...) 
+ A `ResultDescriptor` for raster queries + """ # noqa: E501 + bands: List[RasterBandDescriptor] bbox: Optional[SpatialPartition2D] = None - data_type: RasterDataType = Field(..., alias="dataType") + data_type: RasterDataType = Field(alias="dataType") resolution: Optional[SpatialResolution] = None - spatial_reference: StrictStr = Field(..., alias="spatialReference") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - type: StrictStr = Field(...) - __properties = ["bands", "bbox", "dataType", "resolution", "spatialReference", "time", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["bands", "bbox", "dataType", "resolution", "spatialReference", "time", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('raster'): + if value not in set(['raster']): raise ValueError("must be one of enum values ('raster')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedRasterResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedRasterResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of each item in bands (list) _items = [] if self.bands: - for _item in self.bands: - if _item: - _items.append(_item.to_dict()) + for _item_bands in self.bands: + if _item_bands: + _items.append(_item_bands.to_dict()) _dict['bands'] = _items # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: @@ -88,38 +104,38 @@ def to_dict(self): if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if resolution (nullable) is None - # and __fields_set__ contains the field - if self.resolution is None and "resolution" in self.__fields_set__: + # and model_fields_set contains the field + if self.resolution is None and "resolution" in self.model_fields_set: _dict['resolution'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedRasterResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedRasterResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedRasterResultDescriptor.parse_obj(obj) - - _obj = TypedRasterResultDescriptor.parse_obj({ - "bands": [RasterBandDescriptor.from_dict(_item) for _item in obj.get("bands")] if obj.get("bands") is not None else None, - "bbox": SpatialPartition2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, - "data_type": obj.get("dataType"), - "resolution": SpatialResolution.from_dict(obj.get("resolution")) if obj.get("resolution") is not None else None, - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None, + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bands": [RasterBandDescriptor.from_dict(_item) for _item in obj["bands"]] if obj.get("bands") is not None else None, + "bbox": SpatialPartition2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, + "dataType": obj.get("dataType"), + "resolution": SpatialResolution.from_dict(obj["resolution"]) if obj.get("resolution") is not None else None, + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/typed_result_descriptor.py b/python/geoengine_openapi_client/models/typed_result_descriptor.py index 5bb4794c..f06c5e53 100644 --- a/python/geoengine_openapi_client/models/typed_result_descriptor.py +++ b/python/geoengine_openapi_client/models/typed_result_descriptor.py @@ -14,18 +14,16 @@ from __future__ import annotations -from inspect import getfullargspec import json import pprint -import re # noqa: F401 - +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator from typing 
import Any, List, Optional -from pydantic import BaseModel, Field, StrictStr, ValidationError, validator from geoengine_openapi_client.models.typed_plot_result_descriptor import TypedPlotResultDescriptor from geoengine_openapi_client.models.typed_raster_result_descriptor import TypedRasterResultDescriptor from geoengine_openapi_client.models.typed_vector_result_descriptor import TypedVectorResultDescriptor -from typing import Union, Any, List, TYPE_CHECKING from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self TYPEDRESULTDESCRIPTOR_ONE_OF_SCHEMAS = ["TypedPlotResultDescriptor", "TypedRasterResultDescriptor", "TypedVectorResultDescriptor"] @@ -39,16 +37,16 @@ class TypedResultDescriptor(BaseModel): oneof_schema_2_validator: Optional[TypedRasterResultDescriptor] = None # data type: TypedVectorResultDescriptor oneof_schema_3_validator: Optional[TypedVectorResultDescriptor] = None - if TYPE_CHECKING: - actual_instance: Union[TypedPlotResultDescriptor, TypedRasterResultDescriptor, TypedVectorResultDescriptor] - else: - actual_instance: Any - one_of_schemas: List[str] = Field(TYPEDRESULTDESCRIPTOR_ONE_OF_SCHEMAS, const=True) + actual_instance: Optional[Union[TypedPlotResultDescriptor, TypedRasterResultDescriptor, TypedVectorResultDescriptor]] = None + one_of_schemas: Set[str] = { "TypedPlotResultDescriptor", "TypedRasterResultDescriptor", "TypedVectorResultDescriptor" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - validate_assignment = True - discriminator_value_class_map = { + discriminator_value_class_map: Dict[str, str] = { } def __init__(self, *args, **kwargs) -> None: @@ -61,9 +59,9 @@ def __init__(self, *args, **kwargs) -> None: else: super().__init__(**kwargs) - @validator('actual_instance') + @field_validator('actual_instance') def actual_instance_must_validate_oneof(cls, v): - instance = TypedResultDescriptor.construct() + instance = TypedResultDescriptor.model_construct() error_messages = [] match = 0 # validate data type: TypedPlotResultDescriptor @@ -91,13 +89,13 @@ def actual_instance_must_validate_oneof(cls, v): return v @classmethod - def from_dict(cls, obj: dict) -> TypedResultDescriptor: + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: return cls.from_json(json.dumps(obj)) @classmethod - def from_json(cls, json_str: str) -> TypedResultDescriptor: + def from_json(cls, json_str: str) -> Self: """Returns the object represented by the json string""" - instance = TypedResultDescriptor.construct() + instance = cls.model_construct() error_messages = [] match = 0 @@ -107,32 +105,32 @@ def from_json(cls, json_str: str) -> TypedResultDescriptor: raise ValueError("Failed to lookup data type from the field `type` in the input.") # check if data type is `TypedPlotResultDescriptor` - if _data_type == "TypedPlotResultDescriptor": + if _data_type == "plot": instance.actual_instance = TypedPlotResultDescriptor.from_json(json_str) return instance # check if data type is `TypedRasterResultDescriptor` - if _data_type == "TypedRasterResultDescriptor": + if _data_type == "raster": instance.actual_instance = TypedRasterResultDescriptor.from_json(json_str) return instance # check if data type is `TypedVectorResultDescriptor` - if _data_type == "TypedVectorResultDescriptor": + if _data_type == "vector": instance.actual_instance = TypedVectorResultDescriptor.from_json(json_str) return instance # check if data type is `TypedPlotResultDescriptor` - 
if _data_type == "plot": + if _data_type == "TypedPlotResultDescriptor": instance.actual_instance = TypedPlotResultDescriptor.from_json(json_str) return instance # check if data type is `TypedRasterResultDescriptor` - if _data_type == "raster": + if _data_type == "TypedRasterResultDescriptor": instance.actual_instance = TypedRasterResultDescriptor.from_json(json_str) return instance # check if data type is `TypedVectorResultDescriptor` - if _data_type == "vector": + if _data_type == "TypedVectorResultDescriptor": instance.actual_instance = TypedVectorResultDescriptor.from_json(json_str) return instance @@ -169,19 +167,17 @@ def to_json(self) -> str: if self.actual_instance is None: return "null" - to_json = getattr(self.actual_instance, "to_json", None) - if callable(to_json): + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): return self.actual_instance.to_json() else: return json.dumps(self.actual_instance) - def to_dict(self) -> dict: + def to_dict(self) -> Optional[Union[Dict[str, Any], TypedPlotResultDescriptor, TypedRasterResultDescriptor, TypedVectorResultDescriptor]]: """Returns the dict representation of the actual instance""" if self.actual_instance is None: return None - to_dict = getattr(self.actual_instance, "to_dict", None) - if callable(to_dict): + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): return self.actual_instance.to_dict() else: # primitive type @@ -189,6 +185,6 @@ def to_dict(self) -> dict: def to_str(self) -> str: """Returns the string representation of the actual instance""" - return pprint.pformat(self.dict()) + return pprint.pformat(self.model_dump()) diff --git a/python/geoengine_openapi_client/models/typed_vector_result_descriptor.py b/python/geoengine_openapi_client/models/typed_vector_result_descriptor.py index 5e151881..90281332 100644 --- a/python/geoengine_openapi_client/models/typed_vector_result_descriptor.py +++ b/python/geoengine_openapi_client/models/typed_vector_result_descriptor.py @@ -18,102 +18,118 @@ import re # noqa: F401 import json - -from typing import Dict, Optional -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D from geoengine_openapi_client.models.time_interval import TimeInterval from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo from geoengine_openapi_client.models.vector_data_type import VectorDataType +from typing import Optional, Set +from typing_extensions import Self class TypedVectorResultDescriptor(BaseModel): """ TypedVectorResultDescriptor - """ + """ # noqa: E501 bbox: Optional[BoundingBox2D] = None - columns: Dict[str, VectorColumnInfo] = Field(...) - data_type: VectorDataType = Field(..., alias="dataType") - spatial_reference: StrictStr = Field(..., alias="spatialReference") + columns: Dict[str, VectorColumnInfo] + data_type: VectorDataType = Field(alias="dataType") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - type: StrictStr = Field(...) 
- __properties = ["bbox", "columns", "dataType", "spatialReference", "time", "type"] + type: StrictStr + __properties: ClassVar[List[str]] = ["bbox", "columns", "dataType", "spatialReference", "time", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('vector'): + if value not in set(['vector']): raise ValueError("must be one of enum values ('vector')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> TypedVectorResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of TypedVectorResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: _dict['bbox'] = self.bbox.to_dict() # override the default output from pydantic by calling `to_dict()` of each value in columns (dict) _field_dict = {} if self.columns: - for _key in self.columns: - if self.columns[_key]: - _field_dict[_key] = self.columns[_key].to_dict() + for _key_columns in self.columns: + if self.columns[_key_columns]: + _field_dict[_key_columns] = self.columns[_key_columns].to_dict() _dict['columns'] = _field_dict # override the default output from pydantic by calling `to_dict()` of time if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> TypedVectorResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of TypedVectorResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return TypedVectorResultDescriptor.parse_obj(obj) + return cls.model_validate(obj) - _obj = TypedVectorResultDescriptor.parse_obj({ - "bbox": BoundingBox2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, + _obj = cls.model_validate({ + "bbox": BoundingBox2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, "columns": dict( (_k, VectorColumnInfo.from_dict(_v)) - for _k, _v in obj.get("columns").items() + for _k, _v in obj["columns"].items() ) if obj.get("columns") is not None else None, - "data_type": obj.get("dataType"), - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None, + "dataType": obj.get("dataType"), + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/unitless_measurement.py b/python/geoengine_openapi_client/models/unitless_measurement.py index c0ba7121..4c27eafe 100644 --- a/python/geoengine_openapi_client/models/unitless_measurement.py +++ b/python/geoengine_openapi_client/models/unitless_measurement.py @@ -18,60 +18,76 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UnitlessMeasurement(BaseModel): """ UnitlessMeasurement - """ - type: StrictStr = Field(...) 
- __properties = ["type"] + """ # noqa: E501 + type: StrictStr + __properties: ClassVar[List[str]] = ["type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('unitless', 'continuous', 'classification'): - raise ValueError("must be one of enum values ('unitless', 'continuous', 'classification')") + if value not in set(['unitless']): + raise ValueError("must be one of enum values ('unitless')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UnitlessMeasurement: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UnitlessMeasurement from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UnitlessMeasurement: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UnitlessMeasurement from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UnitlessMeasurement.parse_obj(obj) + return cls.model_validate(obj) - _obj = UnitlessMeasurement.parse_obj({ + _obj = cls.model_validate({ "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/unix_time_stamp_type.py b/python/geoengine_openapi_client/models/unix_time_stamp_type.py index c381609e..1ad4b3a3 100644 --- a/python/geoengine_openapi_client/models/unix_time_stamp_type.py +++ b/python/geoengine_openapi_client/models/unix_time_stamp_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class UnixTimeStampType(str, Enum): @@ -34,8 +31,8 @@ class UnixTimeStampType(str, Enum): EPOCHMILLISECONDS = 'epochMilliseconds' @classmethod - def from_json(cls, json_str: str) -> UnixTimeStampType: + def from_json(cls, json_str: str) -> Self: """Create an instance of UnixTimeStampType from a JSON string""" - return UnixTimeStampType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/update_dataset.py b/python/geoengine_openapi_client/models/update_dataset.py index 23d309d6..21ab8a28 100644 --- a/python/geoengine_openapi_client/models/update_dataset.py +++ b/python/geoengine_openapi_client/models/update_dataset.py @@ -18,56 +18,72 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UpdateDataset(BaseModel): """ UpdateDataset - """ - description: StrictStr = Field(...) - display_name: StrictStr = Field(...) - name: StrictStr = Field(...) - tags: conlist(StrictStr) = Field(...) 
- __properties = ["description", "display_name", "name", "tags"] + """ # noqa: E501 + description: StrictStr + display_name: StrictStr + name: StrictStr + tags: List[StrictStr] + __properties: ClassVar[List[str]] = ["description", "display_name", "name", "tags"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UpdateDataset: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UpdateDataset from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UpdateDataset: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UpdateDataset from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UpdateDataset.parse_obj(obj) + return cls.model_validate(obj) - _obj = UpdateDataset.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "display_name": obj.get("display_name"), "name": obj.get("name"), diff --git a/python/geoengine_openapi_client/models/update_layer.py b/python/geoengine_openapi_client/models/update_layer.py index ee7faed5..e99cb46a 100644 --- a/python/geoengine_openapi_client/models/update_layer.py +++ b/python/geoengine_openapi_client/models/update_layer.py @@ -18,48 +18,65 @@ import re # noqa: F401 import json - -from typing import Dict, List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated from geoengine_openapi_client.models.symbology import Symbology from geoengine_openapi_client.models.workflow import Workflow +from typing import Optional, Set +from typing_extensions import Self class UpdateLayer(BaseModel): """ UpdateLayer - """ - description: StrictStr = Field(...) - metadata: Optional[Dict[str, StrictStr]] = Field(None, description="metadata used for loading the data") - name: StrictStr = Field(...) 
- properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = Field(None, description="properties, for instance, to be rendered in the UI") + """ # noqa: E501 + description: StrictStr + metadata: Optional[Dict[str, StrictStr]] = Field(default=None, description="metadata used for loading the data") + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = Field(default=None, description="properties, for instance, to be rendered in the UI") symbology: Optional[Symbology] = None - workflow: Workflow = Field(...) - __properties = ["description", "metadata", "name", "properties", "symbology", "workflow"] + workflow: Workflow + __properties: ClassVar[List[str]] = ["description", "metadata", "name", "properties", "symbology", "workflow"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UpdateLayer: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UpdateLayer from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of symbology if self.symbology: _dict['symbology'] = self.symbology.to_dict() @@ -67,28 +84,28 @@ def to_dict(self): if self.workflow: _dict['workflow'] = self.workflow.to_dict() # set to None if symbology (nullable) is None - # and __fields_set__ contains the field - if self.symbology is None and "symbology" in self.__fields_set__: + # and model_fields_set contains the field + if self.symbology is None and "symbology" in self.model_fields_set: _dict['symbology'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> UpdateLayer: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UpdateLayer from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UpdateLayer.parse_obj(obj) + return cls.model_validate(obj) - _obj = UpdateLayer.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "metadata": obj.get("metadata"), "name": obj.get("name"), "properties": obj.get("properties"), - "symbology": Symbology.from_dict(obj.get("symbology")) if obj.get("symbology") is not None else None, - "workflow": Workflow.from_dict(obj.get("workflow")) if obj.get("workflow") is not None else None + "symbology": Symbology.from_dict(obj["symbology"]) if obj.get("symbology") is not None else None, + "workflow": Workflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/update_layer_collection.py b/python/geoengine_openapi_client/models/update_layer_collection.py index 35c6780d..0a6341ae 100644 --- a/python/geoengine_openapi_client/models/update_layer_collection.py +++ b/python/geoengine_openapi_client/models/update_layer_collection.py @@ -18,55 +18,72 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self class UpdateLayerCollection(BaseModel): """ UpdateLayerCollection - """ - description: StrictStr = Field(...) - name: StrictStr = Field(...) 
- properties: Optional[conlist(conlist(StrictStr, max_items=2, min_items=2))] = None - __properties = ["description", "name", "properties"] + """ # noqa: E501 + description: StrictStr + name: StrictStr + properties: Optional[List[Annotated[List[StrictStr], Field(min_length=2, max_length=2)]]] = None + __properties: ClassVar[List[str]] = ["description", "name", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UpdateLayerCollection: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UpdateLayerCollection from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UpdateLayerCollection: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UpdateLayerCollection from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UpdateLayerCollection.parse_obj(obj) + return cls.model_validate(obj) - _obj = UpdateLayerCollection.parse_obj({ + _obj = cls.model_validate({ "description": obj.get("description"), "name": obj.get("name"), "properties": obj.get("properties") diff --git a/python/geoengine_openapi_client/models/update_project.py b/python/geoengine_openapi_client/models/update_project.py index 17eb2601..bad424d7 100644 --- a/python/geoengine_openapi_client/models/update_project.py +++ b/python/geoengine_openapi_client/models/update_project.py @@ -18,120 +18,135 @@ import re # noqa: F401 import json - -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist -from geoengine_openapi_client.models.layer_update import LayerUpdate -from geoengine_openapi_client.models.plot_update import PlotUpdate +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.st_rectangle import STRectangle from geoengine_openapi_client.models.time_step import TimeStep +from geoengine_openapi_client.models.vec_update import VecUpdate +from typing import Optional, Set +from typing_extensions import Self class UpdateProject(BaseModel): """ UpdateProject - """ + """ # noqa: E501 bounds: Optional[STRectangle] = None description: Optional[StrictStr] = None - id: StrictStr = Field(...) - layers: Optional[conlist(LayerUpdate)] = None + id: StrictStr + layers: Optional[List[VecUpdate]] = None name: Optional[StrictStr] = None - plots: Optional[conlist(PlotUpdate)] = None - time_step: Optional[TimeStep] = Field(None, alias="timeStep") - __properties = ["bounds", "description", "id", "layers", "name", "plots", "timeStep"] + plots: Optional[List[VecUpdate]] = None + time_step: Optional[TimeStep] = Field(default=None, alias="timeStep") + __properties: ClassVar[List[str]] = ["bounds", "description", "id", "layers", "name", "plots", "timeStep"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UpdateProject: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UpdateProject from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bounds if self.bounds: _dict['bounds'] = self.bounds.to_dict() # override the default output from pydantic by calling `to_dict()` of each item in layers (list) _items = [] if self.layers: - for _item in self.layers: - if _item: - _items.append(_item.to_dict()) + for _item_layers in self.layers: + if _item_layers: + _items.append(_item_layers.to_dict()) _dict['layers'] = _items # override the default output from pydantic by calling `to_dict()` of each item in plots (list) _items = [] if self.plots: - for _item in self.plots: - if _item: - _items.append(_item.to_dict()) + for _item_plots in self.plots: + if _item_plots: + _items.append(_item_plots.to_dict()) _dict['plots'] = _items # override the default output from pydantic by calling `to_dict()` of time_step if self.time_step: _dict['timeStep'] = self.time_step.to_dict() # set to None if bounds (nullable) is None - # and __fields_set__ contains the field - if self.bounds is None and "bounds" in self.__fields_set__: + # and model_fields_set contains the field + if self.bounds is None and "bounds" in self.model_fields_set: _dict['bounds'] = None # set to None if description (nullable) is None - # and __fields_set__ contains the field - if self.description is None and "description" in self.__fields_set__: + # and model_fields_set contains the field + if self.description is None and "description" in self.model_fields_set: _dict['description'] = None # set to None if layers (nullable) is None - # and __fields_set__ contains the field - if self.layers is None and "layers" in self.__fields_set__: + # and model_fields_set contains the field + if self.layers is None and "layers" in self.model_fields_set: _dict['layers'] = None # set to None if name (nullable) is None - # and __fields_set__ contains the field - if self.name is None and "name" in self.__fields_set__: + # and model_fields_set contains the field + if self.name is None and "name" in self.model_fields_set: _dict['name'] = None # set to None if plots (nullable) is None - # and __fields_set__ contains the field - if self.plots is None and "plots" in self.__fields_set__: + # and model_fields_set contains the field + if self.plots is None and "plots" in self.model_fields_set: _dict['plots'] = None # set to None if time_step (nullable) is None - # and __fields_set__ contains the field - if self.time_step is None and "time_step" in self.__fields_set__: + # and model_fields_set contains the field + if self.time_step is None and "time_step" in self.model_fields_set: _dict['timeStep'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> UpdateProject: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UpdateProject from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UpdateProject.parse_obj(obj) + return cls.model_validate(obj) - _obj = UpdateProject.parse_obj({ - "bounds": STRectangle.from_dict(obj.get("bounds")) if obj.get("bounds") is not None else None, + _obj = cls.model_validate({ + "bounds": STRectangle.from_dict(obj["bounds"]) if 
obj.get("bounds") is not None else None, "description": obj.get("description"), "id": obj.get("id"), - "layers": [LayerUpdate.from_dict(_item) for _item in obj.get("layers")] if obj.get("layers") is not None else None, + "layers": [VecUpdate.from_dict(_item) for _item in obj["layers"]] if obj.get("layers") is not None else None, "name": obj.get("name"), - "plots": [PlotUpdate.from_dict(_item) for _item in obj.get("plots")] if obj.get("plots") is not None else None, - "time_step": TimeStep.from_dict(obj.get("timeStep")) if obj.get("timeStep") is not None else None + "plots": [VecUpdate.from_dict(_item) for _item in obj["plots"]] if obj.get("plots") is not None else None, + "timeStep": TimeStep.from_dict(obj["timeStep"]) if obj.get("timeStep") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/update_quota.py b/python/geoengine_openapi_client/models/update_quota.py index 2c75f7b3..7b871979 100644 --- a/python/geoengine_openapi_client/models/update_quota.py +++ b/python/geoengine_openapi_client/models/update_quota.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictInt +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UpdateQuota(BaseModel): """ UpdateQuota - """ - available: StrictInt = Field(...) - __properties = ["available"] + """ # noqa: E501 + available: StrictInt + __properties: ClassVar[List[str]] = ["available"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UpdateQuota: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UpdateQuota from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UpdateQuota: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UpdateQuota from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UpdateQuota.parse_obj(obj) + return cls.model_validate(obj) - _obj = UpdateQuota.parse_obj({ + _obj = cls.model_validate({ "available": obj.get("available") }) return _obj diff --git a/python/geoengine_openapi_client/models/upload_file_layers_response.py b/python/geoengine_openapi_client/models/upload_file_layers_response.py index 34ab5de7..0be461fb 100644 --- a/python/geoengine_openapi_client/models/upload_file_layers_response.py +++ b/python/geoengine_openapi_client/models/upload_file_layers_response.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UploadFileLayersResponse(BaseModel): """ UploadFileLayersResponse - """ - layers: conlist(StrictStr) = Field(...) - __properties = ["layers"] + """ # noqa: E501 + layers: List[StrictStr] + __properties: ClassVar[List[str]] = ["layers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UploadFileLayersResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UploadFileLayersResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UploadFileLayersResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UploadFileLayersResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UploadFileLayersResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = UploadFileLayersResponse.parse_obj({ + _obj = cls.model_validate({ "layers": obj.get("layers") }) return _obj diff --git a/python/geoengine_openapi_client/models/upload_files_response.py b/python/geoengine_openapi_client/models/upload_files_response.py index 1da3ffa0..233ceba9 100644 --- a/python/geoengine_openapi_client/models/upload_files_response.py +++ b/python/geoengine_openapi_client/models/upload_files_response.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UploadFilesResponse(BaseModel): """ UploadFilesResponse - """ - files: conlist(StrictStr) = Field(...) - __properties = ["files"] + """ # noqa: E501 + files: List[StrictStr] + __properties: ClassVar[List[str]] = ["files"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UploadFilesResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UploadFilesResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UploadFilesResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UploadFilesResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UploadFilesResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = UploadFilesResponse.parse_obj({ + _obj = cls.model_validate({ "files": obj.get("files") }) return _obj diff --git a/python/geoengine_openapi_client/models/usage_summary_granularity.py b/python/geoengine_openapi_client/models/usage_summary_granularity.py index 57ed8688..cf7166af 100644 --- a/python/geoengine_openapi_client/models/usage_summary_granularity.py +++ b/python/geoengine_openapi_client/models/usage_summary_granularity.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class UsageSummaryGranularity(str, Enum): @@ -37,8 +34,8 @@ class UsageSummaryGranularity(str, Enum): YEARS = 'years' @classmethod - def from_json(cls, json_str: str) -> UsageSummaryGranularity: + def from_json(cls, json_str: str) -> Self: """Create an instance of UsageSummaryGranularity from a JSON string""" - return UsageSummaryGranularity(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/user_credentials.py b/python/geoengine_openapi_client/models/user_credentials.py index f888b9f7..1bff3987 100644 --- a/python/geoengine_openapi_client/models/user_credentials.py +++ b/python/geoengine_openapi_client/models/user_credentials.py @@ -18,54 +18,70 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UserCredentials(BaseModel): """ UserCredentials - """ - email: StrictStr = Field(...) - password: StrictStr = Field(...) 
- __properties = ["email", "password"] + """ # noqa: E501 + email: StrictStr + password: StrictStr + __properties: ClassVar[List[str]] = ["email", "password"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UserCredentials: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UserCredentials from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UserCredentials: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UserCredentials from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UserCredentials.parse_obj(obj) + return cls.model_validate(obj) - _obj = UserCredentials.parse_obj({ + _obj = cls.model_validate({ "email": obj.get("email"), "password": obj.get("password") }) diff --git a/python/geoengine_openapi_client/models/user_info.py b/python/geoengine_openapi_client/models/user_info.py index 52ede41c..b751f806 100644 --- a/python/geoengine_openapi_client/models/user_info.py +++ b/python/geoengine_openapi_client/models/user_info.py @@ -18,68 +18,84 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self class UserInfo(BaseModel): """ UserInfo - """ + """ # noqa: E501 email: Optional[StrictStr] = None - id: StrictStr = Field(...) 
- real_name: Optional[StrictStr] = Field(None, alias="realName") - __properties = ["email", "id", "realName"] + id: StrictStr + real_name: Optional[StrictStr] = Field(default=None, alias="realName") + __properties: ClassVar[List[str]] = ["email", "id", "realName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UserInfo: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UserInfo from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if email (nullable) is None - # and __fields_set__ contains the field - if self.email is None and "email" in self.__fields_set__: + # and model_fields_set contains the field + if self.email is None and "email" in self.model_fields_set: _dict['email'] = None # set to None if real_name (nullable) is None - # and __fields_set__ contains the field - if self.real_name is None and "real_name" in self.__fields_set__: + # and model_fields_set contains the field + if self.real_name is None and "real_name" in self.model_fields_set: _dict['realName'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> UserInfo: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UserInfo from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UserInfo.parse_obj(obj) + return cls.model_validate(obj) - _obj = UserInfo.parse_obj({ + _obj = cls.model_validate({ "email": obj.get("email"), "id": obj.get("id"), - "real_name": obj.get("realName") + "realName": obj.get("realName") }) return _obj diff --git a/python/geoengine_openapi_client/models/user_registration.py b/python/geoengine_openapi_client/models/user_registration.py index 66d409d0..7178b92a 100644 --- a/python/geoengine_openapi_client/models/user_registration.py +++ b/python/geoengine_openapi_client/models/user_registration.py @@ -18,58 +18,74 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class UserRegistration(BaseModel): """ UserRegistration - """ - email: StrictStr = Field(...) 
- password: StrictStr = Field(...) - real_name: StrictStr = Field(..., alias="realName") - __properties = ["email", "password", "realName"] + """ # noqa: E501 + email: StrictStr + password: StrictStr + real_name: StrictStr = Field(alias="realName") + __properties: ClassVar[List[str]] = ["email", "password", "realName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UserRegistration: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UserRegistration from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> UserRegistration: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UserRegistration from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UserRegistration.parse_obj(obj) + return cls.model_validate(obj) - _obj = UserRegistration.parse_obj({ + _obj = cls.model_validate({ "email": obj.get("email"), "password": obj.get("password"), - "real_name": obj.get("realName") + "realName": obj.get("realName") }) return _obj diff --git a/python/geoengine_openapi_client/models/user_session.py b/python/geoengine_openapi_client/models/user_session.py index 5e5dc940..15d2ac27 100644 --- a/python/geoengine_openapi_client/models/user_session.py +++ b/python/geoengine_openapi_client/models/user_session.py @@ -19,83 +19,95 @@ import json from datetime import datetime -from typing import List, Optional -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.st_rectangle import STRectangle from geoengine_openapi_client.models.user_info import UserInfo +from typing import Optional, Set +from typing_extensions import Self class UserSession(BaseModel): """ UserSession - """ - created: datetime = Field(...) - id: StrictStr = Field(...) + """ # noqa: E501 + created: datetime + id: StrictStr project: Optional[StrictStr] = None - roles: conlist(StrictStr) = Field(...) - user: UserInfo = Field(...) 
- valid_until: datetime = Field(..., alias="validUntil") + roles: List[StrictStr] + user: UserInfo + valid_until: datetime = Field(alias="validUntil") view: Optional[STRectangle] = None - __properties = ["created", "id", "project", "roles", "user", "validUntil", "view"] + __properties: ClassVar[List[str]] = ["created", "id", "project", "roles", "user", "validUntil", "view"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> UserSession: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of UserSession from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of user if self.user: _dict['user'] = self.user.to_dict() # override the default output from pydantic by calling `to_dict()` of view if self.view: _dict['view'] = self.view.to_dict() - # set to None if project (nullable) is None - # and __fields_set__ contains the field - if self.project is None and "project" in self.__fields_set__: - _dict['project'] = None - # set to None if view (nullable) is None - # and __fields_set__ contains the field - if self.view is None and "view" in self.__fields_set__: + # and model_fields_set contains the field + if self.view is None and "view" in self.model_fields_set: _dict['view'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> UserSession: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of UserSession from a dict""" if obj is None: return None if not isinstance(obj, dict): - return UserSession.parse_obj(obj) + return cls.model_validate(obj) - _obj = UserSession.parse_obj({ + _obj = cls.model_validate({ "created": obj.get("created"), "id": obj.get("id"), "project": obj.get("project"), "roles": obj.get("roles"), - "user": UserInfo.from_dict(obj.get("user")) if obj.get("user") is not None else None, - "valid_until": obj.get("validUntil"), - "view": STRectangle.from_dict(obj.get("view")) if obj.get("view") is not None else None + "user": UserInfo.from_dict(obj["user"]) if obj.get("user") is not None else None, + "validUntil": obj.get("validUntil"), + "view": STRectangle.from_dict(obj["view"]) if obj.get("view") is not None else None }) return _obj diff --git 
a/python/geoengine_openapi_client/models/vec_update.py b/python/geoengine_openapi_client/models/vec_update.py new file mode 100644 index 00000000..8d60c338 --- /dev/null +++ b/python/geoengine_openapi_client/models/vec_update.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from geoengine_openapi_client.models.plot import Plot +from geoengine_openapi_client.models.project_update_token import ProjectUpdateToken +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +VECUPDATE_ONE_OF_SCHEMAS = ["Plot", "ProjectUpdateToken"] + +class VecUpdate(BaseModel): + """ + VecUpdate + """ + # data type: ProjectUpdateToken + oneof_schema_1_validator: Optional[ProjectUpdateToken] = None + # data type: Plot + oneof_schema_2_validator: Optional[Plot] = None + actual_instance: Optional[Union[Plot, ProjectUpdateToken]] = None + one_of_schemas: Set[str] = { "Plot", "ProjectUpdateToken" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = VecUpdate.model_construct() + error_messages = [] + match = 0 + # validate data type: ProjectUpdateToken + if not isinstance(v, ProjectUpdateToken): + error_messages.append(f"Error! Input type `{type(v)}` is not `ProjectUpdateToken`") + else: + match += 1 + # validate data type: Plot + if not isinstance(v, Plot): + error_messages.append(f"Error! Input type `{type(v)}` is not `Plot`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in VecUpdate with oneOf schemas: Plot, ProjectUpdateToken. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in VecUpdate with oneOf schemas: Plot, ProjectUpdateToken. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into ProjectUpdateToken + try: + instance.actual_instance = ProjectUpdateToken.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Plot + try: + instance.actual_instance = Plot.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into VecUpdate with oneOf schemas: Plot, ProjectUpdateToken. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into VecUpdate with oneOf schemas: Plot, ProjectUpdateToken. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Plot, ProjectUpdateToken]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/python/geoengine_openapi_client/models/vector_column_info.py b/python/geoengine_openapi_client/models/vector_column_info.py index 5424a65e..e330090e 100644 --- a/python/geoengine_openapi_client/models/vector_column_info.py +++ b/python/geoengine_openapi_client/models/vector_column_info.py @@ -18,61 +18,77 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.feature_data_type import FeatureDataType from geoengine_openapi_client.models.measurement import Measurement +from typing import Optional, Set +from typing_extensions import Self class VectorColumnInfo(BaseModel): """ VectorColumnInfo - """ - data_type: FeatureDataType = Field(..., alias="dataType") - measurement: Measurement = Field(...) 
- __properties = ["dataType", "measurement"] + """ # noqa: E501 + data_type: FeatureDataType = Field(alias="dataType") + measurement: Measurement + __properties: ClassVar[List[str]] = ["dataType", "measurement"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> VectorColumnInfo: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of VectorColumnInfo from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of measurement if self.measurement: _dict['measurement'] = self.measurement.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> VectorColumnInfo: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of VectorColumnInfo from a dict""" if obj is None: return None if not isinstance(obj, dict): - return VectorColumnInfo.parse_obj(obj) + return cls.model_validate(obj) - _obj = VectorColumnInfo.parse_obj({ - "data_type": obj.get("dataType"), - "measurement": Measurement.from_dict(obj.get("measurement")) if obj.get("measurement") is not None else None + _obj = cls.model_validate({ + "dataType": obj.get("dataType"), + "measurement": Measurement.from_dict(obj["measurement"]) if obj.get("measurement") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/vector_data_type.py b/python/geoengine_openapi_client/models/vector_data_type.py index 38b19dd4..05a29c1b 100644 --- a/python/geoengine_openapi_client/models/vector_data_type.py +++ b/python/geoengine_openapi_client/models/vector_data_type.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class VectorDataType(str, Enum): @@ -36,8 +33,8 @@ class VectorDataType(str, Enum): MULTIPOLYGON = 'MultiPolygon' @classmethod - def from_json(cls, json_str: str) -> VectorDataType: + def from_json(cls, json_str: str) -> Self: """Create an instance of VectorDataType from a JSON string""" - return VectorDataType(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git 
a/python/geoengine_openapi_client/models/vector_result_descriptor.py b/python/geoengine_openapi_client/models/vector_result_descriptor.py index 7f5c08b8..8899ce67 100644 --- a/python/geoengine_openapi_client/models/vector_result_descriptor.py +++ b/python/geoengine_openapi_client/models/vector_result_descriptor.py @@ -18,94 +18,110 @@ import re # noqa: F401 import json - -from typing import Dict, Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D from geoengine_openapi_client.models.time_interval import TimeInterval from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo from geoengine_openapi_client.models.vector_data_type import VectorDataType +from typing import Optional, Set +from typing_extensions import Self class VectorResultDescriptor(BaseModel): """ VectorResultDescriptor - """ + """ # noqa: E501 bbox: Optional[BoundingBox2D] = None - columns: Dict[str, VectorColumnInfo] = Field(...) - data_type: VectorDataType = Field(..., alias="dataType") - spatial_reference: StrictStr = Field(..., alias="spatialReference") + columns: Dict[str, VectorColumnInfo] + data_type: VectorDataType = Field(alias="dataType") + spatial_reference: StrictStr = Field(alias="spatialReference") time: Optional[TimeInterval] = None - __properties = ["bbox", "columns", "dataType", "spatialReference", "time"] + __properties: ClassVar[List[str]] = ["bbox", "columns", "dataType", "spatialReference", "time"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> VectorResultDescriptor: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of VectorResultDescriptor from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of bbox if self.bbox: _dict['bbox'] = self.bbox.to_dict() # override the default output from pydantic by calling `to_dict()` of each value in columns (dict) _field_dict = {} if self.columns: - for _key in self.columns: - if self.columns[_key]: - _field_dict[_key] = self.columns[_key].to_dict() + for _key_columns in self.columns: + if self.columns[_key_columns]: + _field_dict[_key_columns] = self.columns[_key_columns].to_dict() _dict['columns'] = _field_dict # override the default output from pydantic by calling `to_dict()` of time if self.time: _dict['time'] = self.time.to_dict() # set to None if bbox (nullable) is None - # and __fields_set__ contains the field - if self.bbox is None and "bbox" in self.__fields_set__: + # and model_fields_set contains the field + if self.bbox is None and "bbox" in self.model_fields_set: _dict['bbox'] = None # set to None if time (nullable) is None - # and __fields_set__ contains the field - if self.time is None and "time" in self.__fields_set__: + # and model_fields_set contains the field + if self.time is None and "time" in self.model_fields_set: _dict['time'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> VectorResultDescriptor: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of VectorResultDescriptor from a dict""" if obj is None: return None if not isinstance(obj, dict): - return VectorResultDescriptor.parse_obj(obj) + return cls.model_validate(obj) - _obj = VectorResultDescriptor.parse_obj({ - "bbox": BoundingBox2D.from_dict(obj.get("bbox")) if obj.get("bbox") is not None else None, + _obj = cls.model_validate({ + "bbox": BoundingBox2D.from_dict(obj["bbox"]) if obj.get("bbox") is not None else None, "columns": dict( (_k, VectorColumnInfo.from_dict(_v)) - for _k, _v in obj.get("columns").items() + for _k, _v in obj["columns"].items() ) if obj.get("columns") is not None else None, - "data_type": obj.get("dataType"), - "spatial_reference": obj.get("spatialReference"), - "time": TimeInterval.from_dict(obj.get("time")) if obj.get("time") is not None else None + "dataType": obj.get("dataType"), + "spatialReference": obj.get("spatialReference"), + "time": TimeInterval.from_dict(obj["time"]) if obj.get("time") is not None else None }) return _obj diff --git a/python/geoengine_openapi_client/models/volume.py b/python/geoengine_openapi_client/models/volume.py index 3e2f9dbe..353ba1ce 100644 --- a/python/geoengine_openapi_client/models/volume.py +++ b/python/geoengine_openapi_client/models/volume.py @@ -18,59 +18,70 @@ import re # noqa: F401 import json - -from typing import Optional -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class Volume(BaseModel): """ Volume - """ - name: StrictStr = Field(...) 
- path: Optional[StrictStr] = None - __properties = ["name", "path"] + """ # noqa: E501 + name: StrictStr + path: StrictStr + __properties: ClassVar[List[str]] = ["name", "path"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Volume: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Volume from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) - # set to None if path (nullable) is None - # and __fields_set__ contains the field - if self.path is None and "path" in self.__fields_set__: - _dict['path'] = None + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> Volume: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Volume from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Volume.parse_obj(obj) + return cls.model_validate(obj) - _obj = Volume.parse_obj({ + _obj = cls.model_validate({ "name": obj.get("name"), "path": obj.get("path") }) diff --git a/python/geoengine_openapi_client/models/volume_file_layers_response.py b/python/geoengine_openapi_client/models/volume_file_layers_response.py index 05e8f7a7..13c6bc87 100644 --- a/python/geoengine_openapi_client/models/volume_file_layers_response.py +++ b/python/geoengine_openapi_client/models/volume_file_layers_response.py @@ -18,53 +18,69 @@ import re # noqa: F401 import json - -from typing import List -from pydantic import BaseModel, Field, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self class VolumeFileLayersResponse(BaseModel): """ VolumeFileLayersResponse - """ - layers: conlist(StrictStr) = Field(...) 
- __properties = ["layers"] + """ # noqa: E501 + layers: List[StrictStr] + __properties: ClassVar[List[str]] = ["layers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> VolumeFileLayersResponse: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of VolumeFileLayersResponse from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> VolumeFileLayersResponse: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of VolumeFileLayersResponse from a dict""" if obj is None: return None if not isinstance(obj, dict): - return VolumeFileLayersResponse.parse_obj(obj) + return cls.model_validate(obj) - _obj = VolumeFileLayersResponse.parse_obj({ + _obj = cls.model_validate({ "layers": obj.get("layers") }) return _obj diff --git a/python/geoengine_openapi_client/models/wcs_boundingbox.py b/python/geoengine_openapi_client/models/wcs_boundingbox.py index 6b6fade3..319945c1 100644 --- a/python/geoengine_openapi_client/models/wcs_boundingbox.py +++ b/python/geoengine_openapi_client/models/wcs_boundingbox.py @@ -18,59 +18,75 @@ import re # noqa: F401 import json - -from typing import List, Optional, Union -from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, conlist +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self class WcsBoundingbox(BaseModel): """ WcsBoundingbox - """ - bbox: conlist(Union[StrictFloat, StrictInt]) = Field(...) 
+ """ # noqa: E501 + bbox: List[Union[StrictFloat, StrictInt]] spatial_reference: Optional[StrictStr] = None - __properties = ["bbox", "spatial_reference"] + __properties: ClassVar[List[str]] = ["bbox", "spatial_reference"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> WcsBoundingbox: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of WcsBoundingbox from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # set to None if spatial_reference (nullable) is None - # and __fields_set__ contains the field - if self.spatial_reference is None and "spatial_reference" in self.__fields_set__: + # and model_fields_set contains the field + if self.spatial_reference is None and "spatial_reference" in self.model_fields_set: _dict['spatial_reference'] = None return _dict @classmethod - def from_dict(cls, obj: dict) -> WcsBoundingbox: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of WcsBoundingbox from a dict""" if obj is None: return None if not isinstance(obj, dict): - return WcsBoundingbox.parse_obj(obj) + return cls.model_validate(obj) - _obj = WcsBoundingbox.parse_obj({ + _obj = cls.model_validate({ "bbox": obj.get("bbox"), "spatial_reference": obj.get("spatial_reference") }) diff --git a/python/geoengine_openapi_client/models/wcs_service.py b/python/geoengine_openapi_client/models/wcs_service.py index 661f52d8..3a8f5be9 100644 --- a/python/geoengine_openapi_client/models/wcs_service.py +++ b/python/geoengine_openapi_client/models/wcs_service.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WcsService(str, Enum): @@ -33,8 +30,8 @@ class WcsService(str, Enum): WCS = 'WCS' @classmethod - def from_json(cls, json_str: str) -> WcsService: + def from_json(cls, json_str: str) -> Self: """Create an instance of WcsService from a JSON string""" - return WcsService(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/wcs_version.py b/python/geoengine_openapi_client/models/wcs_version.py index 238f78ba..abd0d0bd 
100644 --- a/python/geoengine_openapi_client/models/wcs_version.py +++ b/python/geoengine_openapi_client/models/wcs_version.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WcsVersion(str, Enum): @@ -34,8 +31,8 @@ class WcsVersion(str, Enum): ENUM_1_DOT_1_DOT_1 = '1.1.1' @classmethod - def from_json(cls, json_str: str) -> WcsVersion: + def from_json(cls, json_str: str) -> Self: """Create an instance of WcsVersion from a JSON string""" - return WcsVersion(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/wfs_service.py b/python/geoengine_openapi_client/models/wfs_service.py index e6827212..5cfad687 100644 --- a/python/geoengine_openapi_client/models/wfs_service.py +++ b/python/geoengine_openapi_client/models/wfs_service.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WfsService(str, Enum): @@ -33,8 +30,8 @@ class WfsService(str, Enum): WFS = 'WFS' @classmethod - def from_json(cls, json_str: str) -> WfsService: + def from_json(cls, json_str: str) -> Self: """Create an instance of WfsService from a JSON string""" - return WfsService(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/wfs_version.py b/python/geoengine_openapi_client/models/wfs_version.py index 2c278913..8656db7d 100644 --- a/python/geoengine_openapi_client/models/wfs_version.py +++ b/python/geoengine_openapi_client/models/wfs_version.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WfsVersion(str, Enum): @@ -33,8 +30,8 @@ class WfsVersion(str, Enum): ENUM_2_DOT_0_DOT_0 = '2.0.0' @classmethod - def from_json(cls, json_str: str) -> WfsVersion: + def from_json(cls, json_str: str) -> Self: """Create an instance of WfsVersion from a JSON string""" - return WfsVersion(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/wms_service.py b/python/geoengine_openapi_client/models/wms_service.py index e17d8633..235a1148 100644 --- a/python/geoengine_openapi_client/models/wms_service.py +++ b/python/geoengine_openapi_client/models/wms_service.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WmsService(str, Enum): @@ -33,8 +30,8 @@ class WmsService(str, Enum): WMS = 'WMS' @classmethod - def from_json(cls, json_str: str) -> WmsService: + def from_json(cls, json_str: str) -> Self: """Create an instance of WmsService from a JSON string""" - return WmsService(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/wms_version.py b/python/geoengine_openapi_client/models/wms_version.py index 019f76f3..6e1e52fc 100644 --- a/python/geoengine_openapi_client/models/wms_version.py +++ b/python/geoengine_openapi_client/models/wms_version.py @@ -13,13 +13,10 @@ """ # noqa: E501 +from __future__ import annotations import 
json -import pprint -import re # noqa: F401 -from aenum import Enum, no_arg - - - +from enum import Enum +from typing_extensions import Self class WmsVersion(str, Enum): @@ -33,8 +30,8 @@ class WmsVersion(str, Enum): ENUM_1_DOT_3_DOT_0 = '1.3.0' @classmethod - def from_json(cls, json_str: str) -> WmsVersion: + def from_json(cls, json_str: str) -> Self: """Create an instance of WmsVersion from a JSON string""" - return WmsVersion(json.loads(json_str)) + return cls(json.loads(json_str)) diff --git a/python/geoengine_openapi_client/models/workflow.py b/python/geoengine_openapi_client/models/workflow.py index 2809777e..2ce9cd49 100644 --- a/python/geoengine_openapi_client/models/workflow.py +++ b/python/geoengine_openapi_client/models/workflow.py @@ -18,66 +18,82 @@ import re # noqa: F401 import json - - -from pydantic import BaseModel, Field, StrictStr, validator +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.typed_operator_operator import TypedOperatorOperator +from typing import Optional, Set +from typing_extensions import Self class Workflow(BaseModel): """ Workflow - """ - operator: TypedOperatorOperator = Field(...) - type: StrictStr = Field(...) - __properties = ["operator", "type"] + """ # noqa: E501 + operator: TypedOperatorOperator + type: StrictStr + __properties: ClassVar[List[str]] = ["operator", "type"] - @validator('type') + @field_validator('type') def type_validate_enum(cls, value): """Validates the enum""" - if value not in ('Vector', 'Raster', 'Plot'): + if value not in set(['Vector', 'Raster', 'Plot']): raise ValueError("must be one of enum values ('Vector', 'Raster', 'Plot')") return value - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> Workflow: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of Workflow from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) # override the default output from pydantic by calling `to_dict()` of operator if self.operator: _dict['operator'] = self.operator.to_dict() return _dict @classmethod - def from_dict(cls, obj: dict) -> Workflow: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of Workflow from a dict""" if obj is None: return None if not isinstance(obj, dict): - return Workflow.parse_obj(obj) + return cls.model_validate(obj) - _obj = Workflow.parse_obj({ - "operator": TypedOperatorOperator.from_dict(obj.get("operator")) if obj.get("operator") is not None else None, + _obj = cls.model_validate({ + "operator": TypedOperatorOperator.from_dict(obj["operator"]) if obj.get("operator") is not None else None, "type": obj.get("type") }) return _obj diff --git a/python/geoengine_openapi_client/models/wrapped_plot_output.py b/python/geoengine_openapi_client/models/wrapped_plot_output.py index 0718fda1..1865374e 100644 --- a/python/geoengine_openapi_client/models/wrapped_plot_output.py +++ b/python/geoengine_openapi_client/models/wrapped_plot_output.py @@ -18,59 +18,75 @@ import re # noqa: F401 import json - -from typing import Any, Dict -from pydantic import BaseModel, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List from geoengine_openapi_client.models.plot_output_format import PlotOutputFormat +from typing import Optional, Set +from typing_extensions import Self class WrappedPlotOutput(BaseModel): """ WrappedPlotOutput - """ - data: Dict[str, Any] = Field(...) - output_format: PlotOutputFormat = Field(..., alias="outputFormat") - plot_type: StrictStr = Field(..., alias="plotType") - __properties = ["data", "outputFormat", "plotType"] + """ # noqa: E501 + data: Dict[str, Any] + output_format: PlotOutputFormat = Field(alias="outputFormat") + plot_type: StrictStr = Field(alias="plotType") + __properties: ClassVar[List[str]] = ["data", "outputFormat", "plotType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) - class Config: - """Pydantic configuration""" - allow_population_by_field_name = True - validate_assignment = True def to_str(self) -> str: """Returns the string representation of the model using alias""" - return pprint.pformat(self.dict(by_alias=True)) + return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> WrappedPlotOutput: + def from_json(cls, json_str: str) -> Optional[Self]: """Create an instance of WrappedPlotOutput from a JSON string""" return cls.from_dict(json.loads(json_str)) - def to_dict(self): - """Returns the dictionary representation of the model using alias""" - _dict = self.dict(by_alias=True, - exclude={ - }, - exclude_none=True) + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) return _dict @classmethod - def from_dict(cls, obj: dict) -> WrappedPlotOutput: + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: """Create an instance of WrappedPlotOutput from a dict""" if obj is None: return None if not isinstance(obj, dict): - return WrappedPlotOutput.parse_obj(obj) + return cls.model_validate(obj) - _obj = WrappedPlotOutput.parse_obj({ + _obj = cls.model_validate({ "data": obj.get("data"), - "output_format": obj.get("outputFormat"), - "plot_type": obj.get("plotType") + "outputFormat": obj.get("outputFormat"), + "plotType": obj.get("plotType") }) return _obj diff --git a/python/geoengine_openapi_client/rest.py b/python/geoengine_openapi_client/rest.py index 45c90696..7f6bb656 100644 --- a/python/geoengine_openapi_client/rest.py +++ b/python/geoengine_openapi_client/rest.py @@ -15,43 +15,55 @@ import io import json -import logging import re import ssl -from urllib.parse import urlencode, quote_plus import urllib3 -from geoengine_openapi_client.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError, BadRequestException +from geoengine_openapi_client.exceptions import ApiException, ApiValueError +SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} +RESTResponseType = urllib3.HTTPResponse -logger = logging.getLogger(__name__) + +def is_socks_proxy_url(url): + if url is None: + return False + split_section = url.split("://") + if len(split_section) < 2: + return False + else: + return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES class RESTResponse(io.IOBase): def __init__(self, resp) -> None: - self.urllib3_response = resp + self.response = resp self.status = resp.status self.reason = resp.reason - self.data = resp.data + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.data + return self.data def getheaders(self): """Returns a dictionary of the response headers.""" - return self.urllib3_response.headers + return self.response.headers def getheader(self, name, default=None): """Returns a given response header.""" - return self.urllib3_response.headers.get(name, default) + return self.response.headers.get(name, default) class RESTClientObject: - def __init__(self, configuration, pools_size=4, maxsize=None) -> None: + def __init__(self, configuration) -> None: # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 # cert_reqs @@ -60,74 +72,79 @@ def __init__(self, configuration, pools_size=4, maxsize=None) -> None: else: cert_reqs = ssl.CERT_NONE - addition_pool_args = {} + pool_args = { + "cert_reqs": cert_reqs, + "ca_certs": configuration.ssl_ca_cert, + "cert_file": configuration.cert_file, + "key_file": configuration.key_file, + } if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + pool_args['assert_hostname'] = ( + 
configuration.assert_hostname + ) if configuration.retries is not None: - addition_pool_args['retries'] = configuration.retries + pool_args['retries'] = configuration.retries if configuration.tls_server_name: - addition_pool_args['server_hostname'] = configuration.tls_server_name + pool_args['server_hostname'] = configuration.tls_server_name if configuration.socket_options is not None: - addition_pool_args['socket_options'] = configuration.socket_options + pool_args['socket_options'] = configuration.socket_options - if maxsize is None: - if configuration.connection_pool_maxsize is not None: - maxsize = configuration.connection_pool_maxsize - else: - maxsize = 4 + if configuration.connection_pool_maxsize is not None: + pool_args['maxsize'] = configuration.connection_pool_maxsize # https pool manager + self.pool_manager: urllib3.PoolManager + if configuration.proxy: - self.pool_manager = urllib3.ProxyManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - proxy_url=configuration.proxy, - proxy_headers=configuration.proxy_headers, - **addition_pool_args - ) + if is_socks_proxy_url(configuration.proxy): + from urllib3.contrib.socks import SOCKSProxyManager + pool_args["proxy_url"] = configuration.proxy + pool_args["headers"] = configuration.proxy_headers + self.pool_manager = SOCKSProxyManager(**pool_args) + else: + pool_args["proxy_url"] = configuration.proxy + pool_args["proxy_headers"] = configuration.proxy_headers + self.pool_manager = urllib3.ProxyManager(**pool_args) else: - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - **addition_pool_args - ) - - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): + self.pool_manager = urllib3.PoolManager(**pool_args) + + def request( + self, + method, + url, + headers=None, + body=None, + post_params=None, + _request_timeout=None + ): """Perform requests. :param method: http request method :param url: http request url - :param query_params: query parameters in the url :param headers: http request headers :param body: request json body, for `application/json` :param post_params: request post parameters, `application/x-www-form-urlencoded` and `multipart/form-data` - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
""" method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] + assert method in [ + 'GET', + 'HEAD', + 'DELETE', + 'POST', + 'PUT', + 'PATCH', + 'OPTIONS' + ] if post_params and body: raise ApiValueError( @@ -136,63 +153,86 @@ def request(self, method, url, query_params=None, headers=None, post_params = post_params or {} headers = headers or {} - # url already contains the URL query string - # so reset query_params to empty dict - query_params = {} timeout = None if _request_timeout: - if isinstance(_request_timeout, (int,float)): # noqa: E501,F821 + if isinstance(_request_timeout, (int, float)): timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): + elif ( + isinstance(_request_timeout, tuple) + and len(_request_timeout) == 2 + ): timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) + connect=_request_timeout[0], + read=_request_timeout[1] + ) try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: # no content type provided or payload is json - if not headers.get('Content-Type') or re.search('json', headers['Content-Type'], re.IGNORECASE): + content_type = headers.get('Content-Type') + if ( + not content_type + or re.search('json', content_type, re.IGNORECASE) + ): request_body = None if body is not None: request_body = json.dumps(body) r = self.pool_manager.request( - method, url, + method, + url, body=request_body, - preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + headers=headers, + preload_content=False + ) + elif content_type == 'application/x-www-form-urlencoded': r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=False, - preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': + headers=headers, + preload_content=False + ) + elif content_type == 'multipart/form-data': # must del headers['Content-Type'], or the correct # Content-Type which generated by urllib3 will be # overwritten. del headers['Content-Type'] + # Ensures that dict objects are serialized + post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params] r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=True, - preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + preload_content=False + ) # Pass a `string` parameter directly in the body to support - # other content types than Json when `body` argument is - # provided in serialized form + # other content types than JSON when `body` argument is + # provided in serialized form. 
elif isinstance(body, str) or isinstance(body, bytes): - request_body = body r = self.pool_manager.request( - method, url, + method, + url, + body=body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif headers['Content-Type'].startswith('text/') and isinstance(body, bool): + request_body = "true" if body else "false" + r = self.pool_manager.request( + method, + url, body=request_body, - preload_content=_preload_content, + preload_content=False, timeout=timeout, headers=headers) else: @@ -203,102 +243,16 @@ def request(self, method, url, query_params=None, headers=None, raise ApiException(status=0, reason=msg) # For `GET`, `HEAD` else: - r = self.pool_manager.request(method, url, - fields={}, - preload_content=_preload_content, - timeout=timeout, - headers=headers) + r = self.pool_manager.request( + method, + url, + fields={}, + timeout=timeout, + headers=headers, + preload_content=False + ) except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) + msg = "\n".join([type(e).__name__, str(e)]) raise ApiException(status=0, reason=msg) - if _preload_content: - r = RESTResponse(r) - - # log response body - logger.debug("response body: %s", r.data) - - if not 200 <= r.status <= 299: - if r.status == 400: - raise BadRequestException(http_resp=r) - - if r.status == 401: - raise UnauthorizedException(http_resp=r) - - if r.status == 403: - raise ForbiddenException(http_resp=r) - - if r.status == 404: - raise NotFoundException(http_resp=r) - - if 500 <= r.status <= 599: - raise ServiceException(http_resp=r) - - raise ApiException(http_resp=r) - - return r - - def get_request(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def head_request(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def options_request(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def delete_request(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def post_request(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def put_request(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def patch_request(self, url, headers=None, 
query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return RESTResponse(r) diff --git a/python/pyproject.toml b/python/pyproject.toml index 99b2f209..dbf2d8bd 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -10,17 +10,21 @@ keywords = ["OpenAPI", "OpenAPI-Generator", "Geo Engine API"] include = ["geoengine_openapi_client/py.typed"] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" -urllib3 = ">= 1.25.3" -python-dateutil = ">=2.8.2" -pydantic = "^1.10.5, <2" -aenum = ">=3.1.11" +urllib3 = ">= 1.25.3, < 3.0.0" +python-dateutil = ">= 2.8.2" +pydantic = ">= 2" +typing-extensions = ">= 4.7.1" [tool.poetry.dev-dependencies] -pytest = ">=7.2.1" -tox = ">=3.9.0" -flake8 = ">=4.0.0" +pytest = ">= 7.2.1" +pytest-cov = ">= 2.8.1" +tox = ">= 3.9.0" +flake8 = ">= 4.0.0" +types-python-dateutil = ">= 2.8.19.14" +mypy = ">= 1.5" + [build-system] requires = ["setuptools"] @@ -28,3 +32,58 @@ build-backend = "setuptools.build_meta" [tool.pylint.'MESSAGES CONTROL'] extension-pkg-whitelist = "pydantic" + +[tool.mypy] +files = [ + "geoengine_openapi_client", + #"test", # auto-generated tests + "tests", # hand-written tests +] +# TODO: enable "strict" once all these individual checks are passing +# strict = true + +# List from: https://mypy.readthedocs.io/en/stable/existing_code.html#introduce-stricter-options +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true + +## Getting these passing should be easy +strict_equality = true +extra_checks = true + +## Strongly recommend enabling this one as soon as you can +check_untyped_defs = true + +## These shouldn't be too much additional work, but may be tricky to +## get passing if you use a lot of untyped libraries +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_any_generics = true + +### These next few are various gradations of forcing use of type annotations +#disallow_untyped_calls = true +#disallow_incomplete_defs = true +#disallow_untyped_defs = true +# +### This one isn't too hard to get passing, but return on investment is lower +#no_implicit_reexport = true +# +### This one can be tricky to get passing if you use a lot of untyped libraries +#warn_return_any = true + +[[tool.mypy.overrides]] +module = [ + "geoengine_openapi_client.configuration", +] +warn_unused_ignores = true +strict_equality = true +extra_checks = true +check_untyped_defs = true +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_any_generics = true +disallow_untyped_calls = true +disallow_incomplete_defs = true +disallow_untyped_defs = true +no_implicit_reexport = true +warn_return_any = true diff --git a/python/requirements.txt b/python/requirements.txt index 258c179c..67f7f68d 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -1,5 +1,4 @@ -python_dateutil >= 2.5.3 -setuptools >= 21.0.0 -urllib3 >= 1.25.3, < 2.1.0 -pydantic >= 1.10.5, < 2 -aenum >= 3.1.11 +urllib3 >= 1.25.3, < 3.0.0 +python_dateutil >= 2.8.2 +pydantic >= 2 +typing-extensions >= 4.7.1 diff --git a/python/setup.py b/python/setup.py index 0d2bdb22..4e4b9901 100644 --- a/python/setup.py +++ b/python/setup.py @@ -23,12 +23,12 @@ # http://pypi.python.org/pypi/setuptools NAME = "geoengine-openapi-client" VERSION = "0.0.19" -PYTHON_REQUIRES = ">=3.7" 
+PYTHON_REQUIRES = ">= 3.8" REQUIRES = [ - "urllib3 >= 1.25.3, < 2.1.0", - "python-dateutil", - "pydantic >= 1.10.5, < 2", - "aenum" + "urllib3 >= 1.25.3, < 3.0.0", + "python-dateutil >= 2.8.2", + "pydantic >= 2", + "typing-extensions >= 4.7.1", ] setup( @@ -48,4 +48,4 @@ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) """, # noqa: E501 package_data={"geoengine_openapi_client": ["py.typed"]}, -) +) \ No newline at end of file diff --git a/python/test-requirements.txt b/python/test-requirements.txt index 3a0d0b93..e98555c1 100644 --- a/python/test-requirements.txt +++ b/python/test-requirements.txt @@ -1,3 +1,6 @@ -pytest~=7.1.3 -pytest-cov>=2.8.1 -pytest-randomly>=3.12.0 +pytest >= 7.2.1 +pytest-cov >= 2.8.1 +tox >= 3.9.0 +flake8 >= 4.0.0 +types-python-dateutil >= 2.8.19.14 +mypy >= 1.5 diff --git a/python/test/test_add_dataset.py b/python/test/test_add_dataset.py index 3c8000fe..f834dda9 100644 --- a/python/test/test_add_dataset.py +++ b/python/test/test_add_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.add_dataset import AddDataset # noqa: E501 +from geoengine_openapi_client.models.add_dataset import AddDataset class TestAddDataset(unittest.TestCase): """AddDataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AddDataset: """Test AddDataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AddDataset` """ - model = AddDataset() # noqa: E501 + model = AddDataset() if include_optional: return AddDataset( description = '', diff --git a/python/test/test_add_layer.py b/python/test/test_add_layer.py index 3b5406ed..85c14728 100644 --- a/python/test/test_add_layer.py +++ b/python/test/test_add_layer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.add_layer import AddLayer # noqa: E501 +from geoengine_openapi_client.models.add_layer import AddLayer class TestAddLayer(unittest.TestCase): """AddLayer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AddLayer: """Test AddLayer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AddLayer` """ - model = AddLayer() # noqa: E501 + model = AddLayer() if include_optional: return AddLayer( description = 'Example layer description', diff --git a/python/test/test_add_layer_collection.py b/python/test/test_add_layer_collection.py index 52efe282..6574ff45 100644 --- a/python/test/test_add_layer_collection.py +++ b/python/test/test_add_layer_collection.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection # noqa: E501 +from geoengine_openapi_client.models.add_layer_collection import AddLayerCollection class TestAddLayerCollection(unittest.TestCase): """AddLayerCollection unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AddLayerCollection: """Test AddLayerCollection - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are 
included, when True both required and optional params are included """ # uncomment below to create an instance of `AddLayerCollection` """ - model = AddLayerCollection() # noqa: E501 + model = AddLayerCollection() if include_optional: return AddLayerCollection( description = 'A description for an example collection', diff --git a/python/test/test_add_role.py b/python/test/test_add_role.py index ef34ac1f..f025f667 100644 --- a/python/test/test_add_role.py +++ b/python/test/test_add_role.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.add_role import AddRole # noqa: E501 +from geoengine_openapi_client.models.add_role import AddRole class TestAddRole(unittest.TestCase): """AddRole unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AddRole: """Test AddRole - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AddRole` """ - model = AddRole() # noqa: E501 + model = AddRole() if include_optional: return AddRole( name = '' diff --git a/python/test/test_auth_code_request_url.py b/python/test/test_auth_code_request_url.py index 447cf141..9a322482 100644 --- a/python/test/test_auth_code_request_url.py +++ b/python/test/test_auth_code_request_url.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.auth_code_request_url import AuthCodeRequestURL # noqa: E501 +from geoengine_openapi_client.models.auth_code_request_url import AuthCodeRequestURL class TestAuthCodeRequestURL(unittest.TestCase): """AuthCodeRequestURL unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AuthCodeRequestURL: """Test AuthCodeRequestURL - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AuthCodeRequestURL` """ - model = AuthCodeRequestURL() # noqa: E501 + model = AuthCodeRequestURL() if include_optional: return AuthCodeRequestURL( url = '' diff --git a/python/test/test_auth_code_response.py b/python/test/test_auth_code_response.py index 5c0b1bea..d98f1c3a 100644 --- a/python/test/test_auth_code_response.py +++ b/python/test/test_auth_code_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.auth_code_response import AuthCodeResponse # noqa: E501 +from geoengine_openapi_client.models.auth_code_response import AuthCodeResponse class TestAuthCodeResponse(unittest.TestCase): """AuthCodeResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AuthCodeResponse: """Test AuthCodeResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AuthCodeResponse` """ - model = AuthCodeResponse() # noqa: E501 + model = AuthCodeResponse() if include_optional: return AuthCodeResponse( code = '', diff --git a/python/test/test_auto_create_dataset.py b/python/test/test_auto_create_dataset.py index c0fc3e45..d1461734 100644 --- a/python/test/test_auto_create_dataset.py +++ 
b/python/test/test_auto_create_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.auto_create_dataset import AutoCreateDataset # noqa: E501 +from geoengine_openapi_client.models.auto_create_dataset import AutoCreateDataset class TestAutoCreateDataset(unittest.TestCase): """AutoCreateDataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> AutoCreateDataset: """Test AutoCreateDataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `AutoCreateDataset` """ - model = AutoCreateDataset() # noqa: E501 + model = AutoCreateDataset() if include_optional: return AutoCreateDataset( dataset_description = '', diff --git a/python/test/test_axis_order.py b/python/test/test_axis_order.py index f5bb91c4..523458af 100644 --- a/python/test/test_axis_order.py +++ b/python/test/test_axis_order.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.axis_order import AxisOrder # noqa: E501 +from geoengine_openapi_client.models.axis_order import AxisOrder class TestAxisOrder(unittest.TestCase): """AxisOrder unit test stubs""" diff --git a/python/test/test_bounding_box2_d.py b/python/test/test_bounding_box2_d.py index c6614ac1..1b8e8327 100644 --- a/python/test/test_bounding_box2_d.py +++ b/python/test/test_bounding_box2_d.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D # noqa: E501 +from geoengine_openapi_client.models.bounding_box2_d import BoundingBox2D class TestBoundingBox2D(unittest.TestCase): """BoundingBox2D unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> BoundingBox2D: """Test BoundingBox2D - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `BoundingBox2D` """ - model = BoundingBox2D() # noqa: E501 + model = BoundingBox2D() if include_optional: return BoundingBox2D( lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( diff --git a/python/test/test_breakpoint.py b/python/test/test_breakpoint.py index 9cf896fb..54549fc6 100644 --- a/python/test/test_breakpoint.py +++ b/python/test/test_breakpoint.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.breakpoint import Breakpoint # noqa: E501 +from geoengine_openapi_client.models.breakpoint import Breakpoint class TestBreakpoint(unittest.TestCase): """Breakpoint unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Breakpoint: """Test Breakpoint - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Breakpoint` """ - model = Breakpoint() # noqa: E501 + model = Breakpoint() if include_optional: return Breakpoint( color = [ diff --git a/python/test/test_classification_measurement.py b/python/test/test_classification_measurement.py index 176e62d5..b4271c7f 100644 --- a/python/test/test_classification_measurement.py +++ 
b/python/test/test_classification_measurement.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.classification_measurement import ClassificationMeasurement # noqa: E501 +from geoengine_openapi_client.models.classification_measurement import ClassificationMeasurement class TestClassificationMeasurement(unittest.TestCase): """ClassificationMeasurement unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ClassificationMeasurement: """Test ClassificationMeasurement - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ClassificationMeasurement` """ - model = ClassificationMeasurement() # noqa: E501 + model = ClassificationMeasurement() if include_optional: return ClassificationMeasurement( classes = { diff --git a/python/test/test_collection_item.py b/python/test/test_collection_item.py index 9e18e955..7b357346 100644 --- a/python/test/test_collection_item.py +++ b/python/test/test_collection_item.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.collection_item import CollectionItem # noqa: E501 +from geoengine_openapi_client.models.collection_item import CollectionItem class TestCollectionItem(unittest.TestCase): """CollectionItem unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> CollectionItem: """Test CollectionItem - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `CollectionItem` """ - model = CollectionItem() # noqa: E501 + model = CollectionItem() if include_optional: return CollectionItem( description = '', diff --git a/python/test/test_collection_type.py b/python/test/test_collection_type.py index 508329ff..e7ae6ce4 100644 --- a/python/test/test_collection_type.py +++ b/python/test/test_collection_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.collection_type import CollectionType # noqa: E501 +from geoengine_openapi_client.models.collection_type import CollectionType class TestCollectionType(unittest.TestCase): """CollectionType unit test stubs""" diff --git a/python/test/test_color_param.py b/python/test/test_color_param.py index 4aed92f6..49982223 100644 --- a/python/test/test_color_param.py +++ b/python/test/test_color_param.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.color_param import ColorParam # noqa: E501 +from geoengine_openapi_client.models.color_param import ColorParam class TestColorParam(unittest.TestCase): """ColorParam unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ColorParam: """Test ColorParam - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ColorParam` """ - model = ColorParam() # noqa: E501 + model = ColorParam() if include_optional: return ColorParam( color = [ diff --git a/python/test/test_color_param_static.py b/python/test/test_color_param_static.py index 
b8c6451f..f9a5ed86 100644 --- a/python/test/test_color_param_static.py +++ b/python/test/test_color_param_static.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.color_param_static import ColorParamStatic # noqa: E501 +from geoengine_openapi_client.models.color_param_static import ColorParamStatic class TestColorParamStatic(unittest.TestCase): """ColorParamStatic unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ColorParamStatic: """Test ColorParamStatic - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ColorParamStatic` """ - model = ColorParamStatic() # noqa: E501 + model = ColorParamStatic() if include_optional: return ColorParamStatic( color = [ diff --git a/python/test/test_colorizer.py b/python/test/test_colorizer.py index 19c82ab2..86d909ad 100644 --- a/python/test/test_colorizer.py +++ b/python/test/test_colorizer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.colorizer import Colorizer # noqa: E501 +from geoengine_openapi_client.models.colorizer import Colorizer class TestColorizer(unittest.TestCase): """Colorizer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Colorizer: """Test Colorizer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Colorizer` """ - model = Colorizer() # noqa: E501 + model = Colorizer() if include_optional: return Colorizer( breakpoints = [ diff --git a/python/test/test_computation_quota.py b/python/test/test_computation_quota.py index 8b929f82..c8cf959f 100644 --- a/python/test/test_computation_quota.py +++ b/python/test/test_computation_quota.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.computation_quota import ComputationQuota # noqa: E501 +from geoengine_openapi_client.models.computation_quota import ComputationQuota class TestComputationQuota(unittest.TestCase): """ComputationQuota unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ComputationQuota: """Test ComputationQuota - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ComputationQuota` """ - model = ComputationQuota() # noqa: E501 + model = ComputationQuota() if include_optional: return ComputationQuota( computation_id = '', diff --git a/python/test/test_continuous_measurement.py b/python/test/test_continuous_measurement.py index ad4a7a6e..df9fcdbb 100644 --- a/python/test/test_continuous_measurement.py +++ b/python/test/test_continuous_measurement.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.continuous_measurement import ContinuousMeasurement # noqa: E501 +from geoengine_openapi_client.models.continuous_measurement import ContinuousMeasurement class TestContinuousMeasurement(unittest.TestCase): """ContinuousMeasurement unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, 
include_optional) -> ContinuousMeasurement: """Test ContinuousMeasurement - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ContinuousMeasurement` """ - model = ContinuousMeasurement() # noqa: E501 + model = ContinuousMeasurement() if include_optional: return ContinuousMeasurement( measurement = '', diff --git a/python/test/test_coordinate2_d.py b/python/test/test_coordinate2_d.py index 5815cd87..3c21de56 100644 --- a/python/test/test_coordinate2_d.py +++ b/python/test/test_coordinate2_d.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.coordinate2_d import Coordinate2D # noqa: E501 +from geoengine_openapi_client.models.coordinate2_d import Coordinate2D class TestCoordinate2D(unittest.TestCase): """Coordinate2D unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Coordinate2D: """Test Coordinate2D - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Coordinate2D` """ - model = Coordinate2D() # noqa: E501 + model = Coordinate2D() if include_optional: return Coordinate2D( x = 1.337, diff --git a/python/test/test_create_dataset.py b/python/test/test_create_dataset.py index 2ec87b20..d5f98844 100644 --- a/python/test/test_create_dataset.py +++ b/python/test/test_create_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.create_dataset import CreateDataset # noqa: E501 +from geoengine_openapi_client.models.create_dataset import CreateDataset class TestCreateDataset(unittest.TestCase): """CreateDataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> CreateDataset: """Test CreateDataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `CreateDataset` """ - model = CreateDataset() # noqa: E501 + model = CreateDataset() if include_optional: return CreateDataset( data_path = None, @@ -43,7 +42,7 @@ def make_instance(self, include_optional) -> CreateDataset: properties = geoengine_openapi_client.models.add_dataset.AddDataset( description = '', display_name = '', - name = null, + name = '', provenance = [ geoengine_openapi_client.models.provenance.Provenance( citation = '', @@ -64,7 +63,7 @@ def make_instance(self, include_optional) -> CreateDataset: properties = geoengine_openapi_client.models.add_dataset.AddDataset( description = '', display_name = '', - name = null, + name = '', provenance = [ geoengine_openapi_client.models.provenance.Provenance( citation = '', diff --git a/python/test/test_create_project.py b/python/test/test_create_project.py index 0e889e6f..68a2f855 100644 --- a/python/test/test_create_project.py +++ b/python/test/test_create_project.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.create_project import CreateProject # noqa: E501 +from geoengine_openapi_client.models.create_project import CreateProject class TestCreateProject(unittest.TestCase): """CreateProject unit test stubs""" @@ 
-29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> CreateProject: """Test CreateProject - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `CreateProject` """ - model = CreateProject() # noqa: E501 + model = CreateProject() if include_optional: return CreateProject( bounds = geoengine_openapi_client.models.st_rectangle.STRectangle( diff --git a/python/test/test_csv_header.py b/python/test/test_csv_header.py index de610b12..245e0c5a 100644 --- a/python/test/test_csv_header.py +++ b/python/test/test_csv_header.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.csv_header import CsvHeader # noqa: E501 +from geoengine_openapi_client.models.csv_header import CsvHeader class TestCsvHeader(unittest.TestCase): """CsvHeader unit test stubs""" diff --git a/python/test/test_data_id.py b/python/test/test_data_id.py index c10c7bc2..83519351 100644 --- a/python/test/test_data_id.py +++ b/python/test/test_data_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_id import DataId # noqa: E501 +from geoengine_openapi_client.models.data_id import DataId class TestDataId(unittest.TestCase): """DataId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataId: """Test DataId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DataId` """ - model = DataId() # noqa: E501 + model = DataId() if include_optional: return DataId( dataset_id = '', diff --git a/python/test/test_data_path.py b/python/test/test_data_path.py index eff78ce3..70480b7d 100644 --- a/python/test/test_data_path.py +++ b/python/test/test_data_path.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_path import DataPath # noqa: E501 +from geoengine_openapi_client.models.data_path import DataPath class TestDataPath(unittest.TestCase): """DataPath unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataPath: """Test DataPath - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DataPath` """ - model = DataPath() # noqa: E501 + model = DataPath() if include_optional: return DataPath( volume = '', diff --git a/python/test/test_data_path_one_of.py b/python/test/test_data_path_one_of.py index 1e51e8e0..69d4424e 100644 --- a/python/test/test_data_path_one_of.py +++ b/python/test/test_data_path_one_of.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_path_one_of import DataPathOneOf # noqa: E501 +from geoengine_openapi_client.models.data_path_one_of import DataPathOneOf class TestDataPathOneOf(unittest.TestCase): """DataPathOneOf unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataPathOneOf: """Test DataPathOneOf - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are 
included, when True both required and optional params are included """ # uncomment below to create an instance of `DataPathOneOf` """ - model = DataPathOneOf() # noqa: E501 + model = DataPathOneOf() if include_optional: return DataPathOneOf( volume = '' diff --git a/python/test/test_data_path_one_of1.py b/python/test/test_data_path_one_of1.py index 51ad7fa0..c61e1b06 100644 --- a/python/test/test_data_path_one_of1.py +++ b/python/test/test_data_path_one_of1.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_path_one_of1 import DataPathOneOf1 # noqa: E501 +from geoengine_openapi_client.models.data_path_one_of1 import DataPathOneOf1 class TestDataPathOneOf1(unittest.TestCase): """DataPathOneOf1 unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataPathOneOf1: """Test DataPathOneOf1 - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DataPathOneOf1` """ - model = DataPathOneOf1() # noqa: E501 + model = DataPathOneOf1() if include_optional: return DataPathOneOf1( upload = '' diff --git a/python/test/test_data_usage.py b/python/test/test_data_usage.py index 03e7d0d5..8faa5302 100644 --- a/python/test/test_data_usage.py +++ b/python/test/test_data_usage.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_usage import DataUsage # noqa: E501 +from geoengine_openapi_client.models.data_usage import DataUsage class TestDataUsage(unittest.TestCase): """DataUsage unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataUsage: """Test DataUsage - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DataUsage` """ - model = DataUsage() # noqa: E501 + model = DataUsage() if include_optional: return DataUsage( computation_id = '', diff --git a/python/test/test_data_usage_summary.py b/python/test/test_data_usage_summary.py index c796a1d8..6964ce9e 100644 --- a/python/test/test_data_usage_summary.py +++ b/python/test/test_data_usage_summary.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.data_usage_summary import DataUsageSummary # noqa: E501 +from geoengine_openapi_client.models.data_usage_summary import DataUsageSummary class TestDataUsageSummary(unittest.TestCase): """DataUsageSummary unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DataUsageSummary: """Test DataUsageSummary - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DataUsageSummary` """ - model = DataUsageSummary() # noqa: E501 + model = DataUsageSummary() if include_optional: return DataUsageSummary( count = 0, diff --git a/python/test/test_dataset.py b/python/test/test_dataset.py index 01947019..e4a64c2b 100644 --- a/python/test/test_dataset.py +++ b/python/test/test_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.dataset import Dataset # noqa: E501 
+from geoengine_openapi_client.models.dataset import Dataset class TestDataset(unittest.TestCase): """Dataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Dataset: """Test Dataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Dataset` """ - model = Dataset() # noqa: E501 + model = Dataset() if include_optional: return Dataset( description = '', diff --git a/python/test/test_dataset_definition.py b/python/test/test_dataset_definition.py index 5caa402c..f5cb24db 100644 --- a/python/test/test_dataset_definition.py +++ b/python/test/test_dataset_definition.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.dataset_definition import DatasetDefinition # noqa: E501 +from geoengine_openapi_client.models.dataset_definition import DatasetDefinition class TestDatasetDefinition(unittest.TestCase): """DatasetDefinition unit test stubs""" @@ -29,19 +28,19 @@ def tearDown(self): def make_instance(self, include_optional) -> DatasetDefinition: """Test DatasetDefinition - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DatasetDefinition` """ - model = DatasetDefinition() # noqa: E501 + model = DatasetDefinition() if include_optional: return DatasetDefinition( meta_data = None, properties = geoengine_openapi_client.models.add_dataset.AddDataset( description = '', display_name = '', - name = null, + name = '', provenance = [ geoengine_openapi_client.models.provenance.Provenance( citation = '', @@ -60,7 +59,7 @@ def make_instance(self, include_optional) -> DatasetDefinition: properties = geoengine_openapi_client.models.add_dataset.AddDataset( description = '', display_name = '', - name = null, + name = '', provenance = [ geoengine_openapi_client.models.provenance.Provenance( citation = '', diff --git a/python/test/test_dataset_listing.py b/python/test/test_dataset_listing.py index 7e9d6f70..5e93362d 100644 --- a/python/test/test_dataset_listing.py +++ b/python/test/test_dataset_listing.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.dataset_listing import DatasetListing # noqa: E501 +from geoengine_openapi_client.models.dataset_listing import DatasetListing class TestDatasetListing(unittest.TestCase): """DatasetListing unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DatasetListing: """Test DatasetListing - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DatasetListing` """ - model = DatasetListing() # noqa: E501 + model = DatasetListing() if include_optional: return DatasetListing( description = '', diff --git a/python/test/test_dataset_resource.py b/python/test/test_dataset_resource.py index f85d91ab..327441b6 100644 --- a/python/test/test_dataset_resource.py +++ b/python/test/test_dataset_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.dataset_resource import DatasetResource # noqa: E501 +from 
geoengine_openapi_client.models.dataset_resource import DatasetResource class TestDatasetResource(unittest.TestCase): """DatasetResource unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DatasetResource: """Test DatasetResource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DatasetResource` """ - model = DatasetResource() # noqa: E501 + model = DatasetResource() if include_optional: return DatasetResource( id = '', diff --git a/python/test/test_datasets_api.py b/python/test/test_datasets_api.py index 2478a572..5ef40249 100644 --- a/python/test/test_datasets_api.py +++ b/python/test/test_datasets_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.datasets_api import DatasetsApi # noqa: E501 +from geoengine_openapi_client.api.datasets_api import DatasetsApi class TestDatasetsApi(unittest.TestCase): """DatasetsApi unit test stubs""" def setUp(self) -> None: - self.api = DatasetsApi() # noqa: E501 + self.api = DatasetsApi() def tearDown(self) -> None: pass @@ -30,70 +30,70 @@ def tearDown(self) -> None: def test_auto_create_dataset_handler(self) -> None: """Test case for auto_create_dataset_handler - Creates a new dataset using previously uploaded files. # noqa: E501 + Creates a new dataset using previously uploaded files. The format of the files will be automatically detected when possible. """ pass def test_create_dataset_handler(self) -> None: """Test case for create_dataset_handler - Creates a new dataset referencing files. # noqa: E501 + Creates a new dataset referencing files. Users can reference previously uploaded files. Admins can reference files from a volume. """ pass def test_delete_dataset_handler(self) -> None: """Test case for delete_dataset_handler - Delete a dataset # noqa: E501 + Delete a dataset """ pass def test_get_dataset_handler(self) -> None: """Test case for get_dataset_handler - Retrieves details about a dataset using the internal name. # noqa: E501 + Retrieves details about a dataset using the internal name. """ pass def test_get_loading_info_handler(self) -> None: """Test case for get_loading_info_handler - Retrieves the loading information of a dataset # noqa: E501 + Retrieves the loading information of a dataset """ pass def test_list_datasets_handler(self) -> None: """Test case for list_datasets_handler - Lists available datasets. # noqa: E501 + Lists available datasets. """ pass def test_list_volume_file_layers_handler(self) -> None: """Test case for list_volume_file_layers_handler - List the layers of a file in a volume. # noqa: E501 + List the layers of a file in a volume. """ pass def test_list_volumes_handler(self) -> None: """Test case for list_volumes_handler - Lists available volumes. # noqa: E501 + Lists available volumes. """ pass def test_suggest_meta_data_handler(self) -> None: """Test case for suggest_meta_data_handler - Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. # noqa: E501 + Inspects an upload and suggests metadata that can be used when creating a new dataset based on it. Tries to automatically detect the main file and layer name if not specified. """ pass def test_update_dataset_handler(self) -> None: """Test case for update_dataset_handler - Update details about a dataset using the internal name. 
# noqa: E501 + Update details about a dataset using the internal name. """ pass @@ -106,14 +106,14 @@ def test_update_dataset_provenance_handler(self) -> None: def test_update_dataset_symbology_handler(self) -> None: """Test case for update_dataset_symbology_handler - Updates the dataset's symbology # noqa: E501 + Updates the dataset's symbology """ pass def test_update_loading_info_handler(self) -> None: """Test case for update_loading_info_handler - Updates the dataset's loading info # noqa: E501 + Updates the dataset's loading info """ pass diff --git a/python/test/test_derived_color.py b/python/test/test_derived_color.py index 874dc3a8..25ad4f7e 100644 --- a/python/test/test_derived_color.py +++ b/python/test/test_derived_color.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.derived_color import DerivedColor # noqa: E501 +from geoengine_openapi_client.models.derived_color import DerivedColor class TestDerivedColor(unittest.TestCase): """DerivedColor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DerivedColor: """Test DerivedColor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DerivedColor` """ - model = DerivedColor() # noqa: E501 + model = DerivedColor() if include_optional: return DerivedColor( attribute = '', diff --git a/python/test/test_derived_number.py b/python/test/test_derived_number.py index d2ef8aa8..ed47e622 100644 --- a/python/test/test_derived_number.py +++ b/python/test/test_derived_number.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.derived_number import DerivedNumber # noqa: E501 +from geoengine_openapi_client.models.derived_number import DerivedNumber class TestDerivedNumber(unittest.TestCase): """DerivedNumber unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> DerivedNumber: """Test DerivedNumber - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `DerivedNumber` """ - model = DerivedNumber() # noqa: E501 + model = DerivedNumber() if include_optional: return DerivedNumber( attribute = '', diff --git a/python/test/test_describe_coverage_request.py b/python/test/test_describe_coverage_request.py index e2e1145f..29b6bb02 100644 --- a/python/test/test_describe_coverage_request.py +++ b/python/test/test_describe_coverage_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.describe_coverage_request import DescribeCoverageRequest # noqa: E501 +from geoengine_openapi_client.models.describe_coverage_request import DescribeCoverageRequest class TestDescribeCoverageRequest(unittest.TestCase): """DescribeCoverageRequest unit test stubs""" diff --git a/python/test/test_error_response.py b/python/test/test_error_response.py index eae9a1a2..2aebb232 100644 --- a/python/test/test_error_response.py +++ b/python/test/test_error_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.error_response import ErrorResponse # noqa: E501 +from geoengine_openapi_client.models.error_response import ErrorResponse class 
TestErrorResponse(unittest.TestCase): """ErrorResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ErrorResponse: """Test ErrorResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ErrorResponse` """ - model = ErrorResponse() # noqa: E501 + model = ErrorResponse() if include_optional: return ErrorResponse( error = '', diff --git a/python/test/test_external_data_id.py b/python/test/test_external_data_id.py index e64f1a76..fffcfd73 100644 --- a/python/test/test_external_data_id.py +++ b/python/test/test_external_data_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.external_data_id import ExternalDataId # noqa: E501 +from geoengine_openapi_client.models.external_data_id import ExternalDataId class TestExternalDataId(unittest.TestCase): """ExternalDataId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ExternalDataId: """Test ExternalDataId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ExternalDataId` """ - model = ExternalDataId() # noqa: E501 + model = ExternalDataId() if include_optional: return ExternalDataId( layer_id = '', diff --git a/python/test/test_feature_data_type.py b/python/test/test_feature_data_type.py index a64c302c..598f5d21 100644 --- a/python/test/test_feature_data_type.py +++ b/python/test/test_feature_data_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.feature_data_type import FeatureDataType # noqa: E501 +from geoengine_openapi_client.models.feature_data_type import FeatureDataType class TestFeatureDataType(unittest.TestCase): """FeatureDataType unit test stubs""" diff --git a/python/test/test_file_not_found_handling.py b/python/test/test_file_not_found_handling.py index 3d0dbbed..78f1a414 100644 --- a/python/test/test_file_not_found_handling.py +++ b/python/test/test_file_not_found_handling.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.file_not_found_handling import FileNotFoundHandling # noqa: E501 +from geoengine_openapi_client.models.file_not_found_handling import FileNotFoundHandling class TestFileNotFoundHandling(unittest.TestCase): """FileNotFoundHandling unit test stubs""" diff --git a/python/test/test_format_specifics.py b/python/test/test_format_specifics.py index 47108da5..67fa0357 100644 --- a/python/test/test_format_specifics.py +++ b/python/test/test_format_specifics.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.format_specifics import FormatSpecifics # noqa: E501 +from geoengine_openapi_client.models.format_specifics import FormatSpecifics class TestFormatSpecifics(unittest.TestCase): """FormatSpecifics unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> FormatSpecifics: """Test FormatSpecifics - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance 
of `FormatSpecifics` """ - model = FormatSpecifics() # noqa: E501 + model = FormatSpecifics() if include_optional: return FormatSpecifics( csv = geoengine_openapi_client.models.format_specifics_one_of_csv.FormatSpecifics_oneOf_csv( diff --git a/python/test/test_format_specifics_one_of.py b/python/test/test_format_specifics_one_of.py index a203f31e..abe9481a 100644 --- a/python/test/test_format_specifics_one_of.py +++ b/python/test/test_format_specifics_one_of.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.format_specifics_one_of import FormatSpecificsOneOf # noqa: E501 +from geoengine_openapi_client.models.format_specifics_one_of import FormatSpecificsOneOf class TestFormatSpecificsOneOf(unittest.TestCase): """FormatSpecificsOneOf unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> FormatSpecificsOneOf: """Test FormatSpecificsOneOf - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `FormatSpecificsOneOf` """ - model = FormatSpecificsOneOf() # noqa: E501 + model = FormatSpecificsOneOf() if include_optional: return FormatSpecificsOneOf( csv = geoengine_openapi_client.models.format_specifics_one_of_csv.FormatSpecifics_oneOf_csv( diff --git a/python/test/test_format_specifics_one_of_csv.py b/python/test/test_format_specifics_one_of_csv.py index d055473c..8dfee509 100644 --- a/python/test/test_format_specifics_one_of_csv.py +++ b/python/test/test_format_specifics_one_of_csv.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.format_specifics_one_of_csv import FormatSpecificsOneOfCsv # noqa: E501 +from geoengine_openapi_client.models.format_specifics_one_of_csv import FormatSpecificsOneOfCsv class TestFormatSpecificsOneOfCsv(unittest.TestCase): """FormatSpecificsOneOfCsv unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> FormatSpecificsOneOfCsv: """Test FormatSpecificsOneOfCsv - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `FormatSpecificsOneOfCsv` """ - model = FormatSpecificsOneOfCsv() # noqa: E501 + model = FormatSpecificsOneOfCsv() if include_optional: return FormatSpecificsOneOfCsv( header = 'yes' diff --git a/python/test/test_gdal_dataset_geo_transform.py b/python/test/test_gdal_dataset_geo_transform.py index 3cf94d75..e0d99702 100644 --- a/python/test/test_gdal_dataset_geo_transform.py +++ b/python/test/test_gdal_dataset_geo_transform.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_dataset_geo_transform import GdalDatasetGeoTransform # noqa: E501 +from geoengine_openapi_client.models.gdal_dataset_geo_transform import GdalDatasetGeoTransform class TestGdalDatasetGeoTransform(unittest.TestCase): """GdalDatasetGeoTransform unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalDatasetGeoTransform: """Test GdalDatasetGeoTransform - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment 
below to create an instance of `GdalDatasetGeoTransform` """ - model = GdalDatasetGeoTransform() # noqa: E501 + model = GdalDatasetGeoTransform() if include_optional: return GdalDatasetGeoTransform( origin_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( diff --git a/python/test/test_gdal_dataset_parameters.py b/python/test/test_gdal_dataset_parameters.py index 1c436366..0a191627 100644 --- a/python/test/test_gdal_dataset_parameters.py +++ b/python/test/test_gdal_dataset_parameters.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters # noqa: E501 +from geoengine_openapi_client.models.gdal_dataset_parameters import GdalDatasetParameters class TestGdalDatasetParameters(unittest.TestCase): """GdalDatasetParameters unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalDatasetParameters: """Test GdalDatasetParameters - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalDatasetParameters` """ - model = GdalDatasetParameters() # noqa: E501 + model = GdalDatasetParameters() if include_optional: return GdalDatasetParameters( allow_alphaband_as_mask = True, diff --git a/python/test/test_gdal_loading_info_temporal_slice.py b/python/test/test_gdal_loading_info_temporal_slice.py index 88afcf68..06f47c2b 100644 --- a/python/test/test_gdal_loading_info_temporal_slice.py +++ b/python/test/test_gdal_loading_info_temporal_slice.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_loading_info_temporal_slice import GdalLoadingInfoTemporalSlice # noqa: E501 +from geoengine_openapi_client.models.gdal_loading_info_temporal_slice import GdalLoadingInfoTemporalSlice class TestGdalLoadingInfoTemporalSlice(unittest.TestCase): """GdalLoadingInfoTemporalSlice unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalLoadingInfoTemporalSlice: """Test GdalLoadingInfoTemporalSlice - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalLoadingInfoTemporalSlice` """ - model = GdalLoadingInfoTemporalSlice() # noqa: E501 + model = GdalLoadingInfoTemporalSlice() if include_optional: return GdalLoadingInfoTemporalSlice( cache_ttl = 0, diff --git a/python/test/test_gdal_meta_data_list.py b/python/test/test_gdal_meta_data_list.py index eef6f8cd..42d3515f 100644 --- a/python/test/test_gdal_meta_data_list.py +++ b/python/test/test_gdal_meta_data_list.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_meta_data_list import GdalMetaDataList # noqa: E501 +from geoengine_openapi_client.models.gdal_meta_data_list import GdalMetaDataList class TestGdalMetaDataList(unittest.TestCase): """GdalMetaDataList unit test stubs""" @@ -29,18 +28,49 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalMetaDataList: """Test GdalMetaDataList - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below 
to create an instance of `GdalMetaDataList` """ - model = GdalMetaDataList() # noqa: E501 + model = GdalMetaDataList() if include_optional: return GdalMetaDataList( params = [ geoengine_openapi_client.models.gdal_loading_info_temporal_slice.GdalLoadingInfoTemporalSlice( cache_ttl = 0, - params = null, + params = geoengine_openapi_client.models.gdal_dataset_parameters.GdalDatasetParameters( + allow_alphaband_as_mask = True, + file_not_found_handling = 'NoData', + file_path = '', + gdal_config_options = [ + [ + '' + ] + ], + gdal_open_options = [ + '' + ], + geo_transform = geoengine_openapi_client.models.gdal_dataset_geo_transform.GdalDatasetGeoTransform( + origin_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + x_pixel_size = 1.337, + y_pixel_size = 1.337, ), + height = 0, + no_data_value = 1.337, + properties_mapping = [ + geoengine_openapi_client.models.gdal_metadata_mapping.GdalMetadataMapping( + source_key = geoengine_openapi_client.models.raster_properties_key.RasterPropertiesKey( + domain = '', + key = '', ), + target_key = geoengine_openapi_client.models.raster_properties_key.RasterPropertiesKey( + domain = '', + key = '', ), + target_type = 'Number', ) + ], + rasterband_channel = 0, + width = 0, ), time = geoengine_openapi_client.models.time_interval.TimeInterval( end = 56, start = 56, ), ) @@ -51,11 +81,21 @@ def make_instance(self, include_optional) -> GdalMetaDataList: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), type = 'GdalMetaDataList' ) else: @@ -63,7 +103,38 @@ def make_instance(self, include_optional) -> GdalMetaDataList: params = [ geoengine_openapi_client.models.gdal_loading_info_temporal_slice.GdalLoadingInfoTemporalSlice( cache_ttl = 0, - params = null, + params = geoengine_openapi_client.models.gdal_dataset_parameters.GdalDatasetParameters( + allow_alphaband_as_mask = True, + file_not_found_handling = 'NoData', + file_path = '', + gdal_config_options = [ + [ + '' + ] + ], + gdal_open_options = [ + '' + ], + geo_transform = geoengine_openapi_client.models.gdal_dataset_geo_transform.GdalDatasetGeoTransform( + origin_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + x_pixel_size = 1.337, + y_pixel_size = 1.337, ), + height = 0, + no_data_value = 1.337, + properties_mapping = [ + geoengine_openapi_client.models.gdal_metadata_mapping.GdalMetadataMapping( + source_key = geoengine_openapi_client.models.raster_properties_key.RasterPropertiesKey( + domain = '', + key = '', ), + target_key = geoengine_openapi_client.models.raster_properties_key.RasterPropertiesKey( + domain = '', + key = '', ), + target_type = 'Number', ) + ], + rasterband_channel = 0, + width = 0, ), time = geoengine_openapi_client.models.time_interval.TimeInterval( end = 56, start = 56, ), ) @@ -74,11 +145,21 @@ def make_instance(self, include_optional) -> GdalMetaDataList: measurement = null, 
name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), type = 'GdalMetaDataList', ) """ diff --git a/python/test/test_gdal_meta_data_regular.py b/python/test/test_gdal_meta_data_regular.py index 7e4f4d04..1e5b317a 100644 --- a/python/test/test_gdal_meta_data_regular.py +++ b/python/test/test_gdal_meta_data_regular.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_meta_data_regular import GdalMetaDataRegular # noqa: E501 +from geoengine_openapi_client.models.gdal_meta_data_regular import GdalMetaDataRegular class TestGdalMetaDataRegular(unittest.TestCase): """GdalMetaDataRegular unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalMetaDataRegular: """Test GdalMetaDataRegular - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalMetaDataRegular` """ - model = GdalMetaDataRegular() # noqa: E501 + model = GdalMetaDataRegular() if include_optional: return GdalMetaDataRegular( cache_ttl = 0, @@ -79,11 +78,21 @@ def make_instance(self, include_optional) -> GdalMetaDataRegular: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), step = geoengine_openapi_client.models.time_step.TimeStep( granularity = 'millis', step = 0, ), @@ -137,11 +146,21 @@ def make_instance(self, include_optional) -> GdalMetaDataRegular: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), step = geoengine_openapi_client.models.time_step.TimeStep( granularity = 'millis', step = 0, ), diff --git a/python/test/test_gdal_meta_data_static.py 
b/python/test/test_gdal_meta_data_static.py index 597ff43b..668af720 100644 --- a/python/test/test_gdal_meta_data_static.py +++ b/python/test/test_gdal_meta_data_static.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_meta_data_static import GdalMetaDataStatic # noqa: E501 +from geoengine_openapi_client.models.gdal_meta_data_static import GdalMetaDataStatic class TestGdalMetaDataStatic(unittest.TestCase): """GdalMetaDataStatic unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalMetaDataStatic: """Test GdalMetaDataStatic - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalMetaDataStatic` """ - model = GdalMetaDataStatic() # noqa: E501 + model = GdalMetaDataStatic() if include_optional: return GdalMetaDataStatic( cache_ttl = 0, @@ -76,11 +75,21 @@ def make_instance(self, include_optional) -> GdalMetaDataStatic: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), time = geoengine_openapi_client.models.time_interval.TimeInterval( end = 56, start = 56, ), @@ -126,11 +135,21 @@ def make_instance(self, include_optional) -> GdalMetaDataStatic: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), type = 'GdalStatic', ) """ diff --git a/python/test/test_gdal_metadata_mapping.py b/python/test/test_gdal_metadata_mapping.py index c6227d89..1e0dc211 100644 --- a/python/test/test_gdal_metadata_mapping.py +++ b/python/test/test_gdal_metadata_mapping.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_metadata_mapping import GdalMetadataMapping # noqa: E501 +from geoengine_openapi_client.models.gdal_metadata_mapping import GdalMetadataMapping class TestGdalMetadataMapping(unittest.TestCase): """GdalMetadataMapping unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalMetadataMapping: """Test GdalMetadataMapping - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below 
to create an instance of `GdalMetadataMapping` """ - model = GdalMetadataMapping() # noqa: E501 + model = GdalMetadataMapping() if include_optional: return GdalMetadataMapping( source_key = geoengine_openapi_client.models.raster_properties_key.RasterPropertiesKey( diff --git a/python/test/test_gdal_metadata_net_cdf_cf.py b/python/test/test_gdal_metadata_net_cdf_cf.py index 241b60fa..0bcc92a1 100644 --- a/python/test/test_gdal_metadata_net_cdf_cf.py +++ b/python/test/test_gdal_metadata_net_cdf_cf.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_metadata_net_cdf_cf import GdalMetadataNetCdfCf # noqa: E501 +from geoengine_openapi_client.models.gdal_metadata_net_cdf_cf import GdalMetadataNetCdfCf class TestGdalMetadataNetCdfCf(unittest.TestCase): """GdalMetadataNetCdfCf unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalMetadataNetCdfCf: """Test GdalMetadataNetCdfCf - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalMetadataNetCdfCf` """ - model = GdalMetadataNetCdfCf() # noqa: E501 + model = GdalMetadataNetCdfCf() if include_optional: return GdalMetadataNetCdfCf( band_offset = 0, @@ -78,11 +77,21 @@ def make_instance(self, include_optional) -> GdalMetadataNetCdfCf: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), start = 56, step = geoengine_openapi_client.models.time_step.TimeStep( granularity = 'millis', @@ -131,11 +140,21 @@ def make_instance(self, include_optional) -> GdalMetadataNetCdfCf: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), start = 56, step = geoengine_openapi_client.models.time_step.TimeStep( granularity = 'millis', diff --git a/python/test/test_gdal_source_time_placeholder.py b/python/test/test_gdal_source_time_placeholder.py index 66a0487d..83f7093a 100644 --- a/python/test/test_gdal_source_time_placeholder.py +++ b/python/test/test_gdal_source_time_placeholder.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.gdal_source_time_placeholder import GdalSourceTimePlaceholder # noqa: E501 +from 
geoengine_openapi_client.models.gdal_source_time_placeholder import GdalSourceTimePlaceholder class TestGdalSourceTimePlaceholder(unittest.TestCase): """GdalSourceTimePlaceholder unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GdalSourceTimePlaceholder: """Test GdalSourceTimePlaceholder - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GdalSourceTimePlaceholder` """ - model = GdalSourceTimePlaceholder() # noqa: E501 + model = GdalSourceTimePlaceholder() if include_optional: return GdalSourceTimePlaceholder( format = '', diff --git a/python/test/test_general_api.py b/python/test/test_general_api.py index ccaafa94..fc634733 100644 --- a/python/test/test_general_api.py +++ b/python/test/test_general_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.general_api import GeneralApi # noqa: E501 +from geoengine_openapi_client.api.general_api import GeneralApi class TestGeneralApi(unittest.TestCase): """GeneralApi unit test stubs""" def setUp(self) -> None: - self.api = GeneralApi() # noqa: E501 + self.api = GeneralApi() def tearDown(self) -> None: pass @@ -30,14 +30,14 @@ def tearDown(self) -> None: def test_available_handler(self) -> None: """Test case for available_handler - Server availablity check. # noqa: E501 + Server availablity check. """ pass def test_server_info_handler(self) -> None: """Test case for server_info_handler - Shows information about the server software version. # noqa: E501 + Shows information about the server software version. """ pass diff --git a/python/test/test_geo_json.py b/python/test/test_geo_json.py index 90859b3e..8858b6d4 100644 --- a/python/test/test_geo_json.py +++ b/python/test/test_geo_json.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.geo_json import GeoJson # noqa: E501 +from geoengine_openapi_client.models.geo_json import GeoJson class TestGeoJson(unittest.TestCase): """GeoJson unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> GeoJson: """Test GeoJson - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `GeoJson` """ - model = GeoJson() # noqa: E501 + model = GeoJson() if include_optional: return GeoJson( features = [ diff --git a/python/test/test_get_capabilities_format.py b/python/test/test_get_capabilities_format.py index a64df5fb..a6ab84b4 100644 --- a/python/test/test_get_capabilities_format.py +++ b/python/test/test_get_capabilities_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_capabilities_format import GetCapabilitiesFormat # noqa: E501 +from geoengine_openapi_client.models.get_capabilities_format import GetCapabilitiesFormat class TestGetCapabilitiesFormat(unittest.TestCase): """GetCapabilitiesFormat unit test stubs""" diff --git a/python/test/test_get_capabilities_request.py b/python/test/test_get_capabilities_request.py index 5271ce9d..936f40cb 100644 --- a/python/test/test_get_capabilities_request.py +++ b/python/test/test_get_capabilities_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from 
geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest # noqa: E501 +from geoengine_openapi_client.models.get_capabilities_request import GetCapabilitiesRequest class TestGetCapabilitiesRequest(unittest.TestCase): """GetCapabilitiesRequest unit test stubs""" diff --git a/python/test/test_get_coverage_format.py b/python/test/test_get_coverage_format.py index 44623296..5b17b576 100644 --- a/python/test/test_get_coverage_format.py +++ b/python/test/test_get_coverage_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_coverage_format import GetCoverageFormat # noqa: E501 +from geoengine_openapi_client.models.get_coverage_format import GetCoverageFormat class TestGetCoverageFormat(unittest.TestCase): """GetCoverageFormat unit test stubs""" diff --git a/python/test/test_get_coverage_request.py b/python/test/test_get_coverage_request.py index 17b1c43e..bd8fd7de 100644 --- a/python/test/test_get_coverage_request.py +++ b/python/test/test_get_coverage_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_coverage_request import GetCoverageRequest # noqa: E501 +from geoengine_openapi_client.models.get_coverage_request import GetCoverageRequest class TestGetCoverageRequest(unittest.TestCase): """GetCoverageRequest unit test stubs""" diff --git a/python/test/test_get_feature_request.py b/python/test/test_get_feature_request.py index f29b55dd..0dbf2235 100644 --- a/python/test/test_get_feature_request.py +++ b/python/test/test_get_feature_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_feature_request import GetFeatureRequest # noqa: E501 +from geoengine_openapi_client.models.get_feature_request import GetFeatureRequest class TestGetFeatureRequest(unittest.TestCase): """GetFeatureRequest unit test stubs""" diff --git a/python/test/test_get_legend_graphic_request.py b/python/test/test_get_legend_graphic_request.py index a8b68f2b..658cc132 100644 --- a/python/test/test_get_legend_graphic_request.py +++ b/python/test/test_get_legend_graphic_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_legend_graphic_request import GetLegendGraphicRequest # noqa: E501 +from geoengine_openapi_client.models.get_legend_graphic_request import GetLegendGraphicRequest class TestGetLegendGraphicRequest(unittest.TestCase): """GetLegendGraphicRequest unit test stubs""" diff --git a/python/test/test_get_map_exception_format.py b/python/test/test_get_map_exception_format.py index f297f80d..1f7e58a5 100644 --- a/python/test/test_get_map_exception_format.py +++ b/python/test/test_get_map_exception_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_map_exception_format import GetMapExceptionFormat # noqa: E501 +from geoengine_openapi_client.models.get_map_exception_format import GetMapExceptionFormat class TestGetMapExceptionFormat(unittest.TestCase): """GetMapExceptionFormat unit test stubs""" diff --git a/python/test/test_get_map_format.py b/python/test/test_get_map_format.py index 3c28a242..fea9a1b8 100644 --- a/python/test/test_get_map_format.py +++ b/python/test/test_get_map_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_map_format import GetMapFormat # noqa: E501 +from geoengine_openapi_client.models.get_map_format import GetMapFormat class TestGetMapFormat(unittest.TestCase): 
"""GetMapFormat unit test stubs""" diff --git a/python/test/test_get_map_request.py b/python/test/test_get_map_request.py index 1e16a2de..04b8c92c 100644 --- a/python/test/test_get_map_request.py +++ b/python/test/test_get_map_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.get_map_request import GetMapRequest # noqa: E501 +from geoengine_openapi_client.models.get_map_request import GetMapRequest class TestGetMapRequest(unittest.TestCase): """GetMapRequest unit test stubs""" diff --git a/python/test/test_date_time.py b/python/test/test_inline_object.py similarity index 53% rename from python/test/test_date_time.py rename to python/test/test_inline_object.py index ffbb0092..f179dfd5 100644 --- a/python/test/test_date_time.py +++ b/python/test/test_inline_object.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.date_time import DateTime # noqa: E501 +from geoengine_openapi_client.models.inline_object import InlineObject -class TestDateTime(unittest.TestCase): - """DateTime unit test stubs""" +class TestInlineObject(unittest.TestCase): + """InlineObject unit test stubs""" def setUp(self): pass @@ -27,26 +26,26 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> DateTime: - """Test DateTime - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> InlineObject: + """Test InlineObject + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `DateTime` + # uncomment below to create an instance of `InlineObject` """ - model = DateTime() # noqa: E501 + model = InlineObject() if include_optional: - return DateTime( - datetime = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f') + return InlineObject( + url = '' ) else: - return DateTime( - datetime = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + return InlineObject( + url = '', ) """ - def testDateTime(self): - """Test DateTime""" + def testInlineObject(self): + """Test InlineObject""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_create_dataset_handler200_response.py b/python/test/test_inline_object1.py similarity index 52% rename from python/test/test_create_dataset_handler200_response.py rename to python/test/test_inline_object1.py index f862a07e..714a90a0 100644 --- a/python/test/test_create_dataset_handler200_response.py +++ b/python/test/test_inline_object1.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.create_dataset_handler200_response import CreateDatasetHandler200Response # noqa: E501 +from geoengine_openapi_client.models.inline_object1 import InlineObject1 -class TestCreateDatasetHandler200Response(unittest.TestCase): - """CreateDatasetHandler200Response unit test stubs""" +class TestInlineObject1(unittest.TestCase): + """InlineObject1 unit test stubs""" def setUp(self): pass @@ -27,26 +26,26 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> CreateDatasetHandler200Response: - """Test CreateDatasetHandler200Response - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> InlineObject1: + """Test InlineObject1 + include_optional is a 
boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `CreateDatasetHandler200Response` + # uncomment below to create an instance of `InlineObject1` """ - model = CreateDatasetHandler200Response() # noqa: E501 + model = InlineObject1() if include_optional: - return CreateDatasetHandler200Response( + return InlineObject1( dataset_name = '' ) else: - return CreateDatasetHandler200Response( + return InlineObject1( dataset_name = '', ) """ - def testCreateDatasetHandler200Response(self): - """Test CreateDatasetHandler200Response""" + def testInlineObject1(self): + """Test InlineObject1""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_add_collection200_response.py b/python/test/test_inline_object2.py similarity index 54% rename from python/test/test_add_collection200_response.py rename to python/test/test_inline_object2.py index 69c34b75..06ff32d5 100644 --- a/python/test/test_add_collection200_response.py +++ b/python/test/test_inline_object2.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.add_collection200_response import AddCollection200Response # noqa: E501 +from geoengine_openapi_client.models.inline_object2 import InlineObject2 -class TestAddCollection200Response(unittest.TestCase): - """AddCollection200Response unit test stubs""" +class TestInlineObject2(unittest.TestCase): + """InlineObject2 unit test stubs""" def setUp(self): pass @@ -27,26 +26,26 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> AddCollection200Response: - """Test AddCollection200Response - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> InlineObject2: + """Test InlineObject2 + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `AddCollection200Response` + # uncomment below to create an instance of `InlineObject2` """ - model = AddCollection200Response() # noqa: E501 + model = InlineObject2() if include_optional: - return AddCollection200Response( + return InlineObject2( id = '' ) else: - return AddCollection200Response( + return InlineObject2( id = '', ) """ - def testAddCollection200Response(self): - """Test AddCollection200Response""" + def testInlineObject2(self): + """Test InlineObject2""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_internal_data_id.py b/python/test/test_internal_data_id.py index 6e7a9523..e3b5ea5f 100644 --- a/python/test/test_internal_data_id.py +++ b/python/test/test_internal_data_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.internal_data_id import InternalDataId # noqa: E501 +from geoengine_openapi_client.models.internal_data_id import InternalDataId class TestInternalDataId(unittest.TestCase): """InternalDataId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> InternalDataId: """Test InternalDataId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # 
uncomment below to create an instance of `InternalDataId` """ - model = InternalDataId() # noqa: E501 + model = InternalDataId() if include_optional: return InternalDataId( dataset_id = '', diff --git a/python/test/test_layer.py b/python/test/test_layer.py index 09cffc93..1f64627b 100644 --- a/python/test/test_layer.py +++ b/python/test/test_layer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer import Layer # noqa: E501 +from geoengine_openapi_client.models.layer import Layer class TestLayer(unittest.TestCase): """Layer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Layer: """Test Layer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Layer` """ - model = Layer() # noqa: E501 + model = Layer() if include_optional: return Layer( description = '', diff --git a/python/test/test_layer_collection.py b/python/test/test_layer_collection.py index 58e531eb..d9f8a61b 100644 --- a/python/test/test_layer_collection.py +++ b/python/test/test_layer_collection.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer_collection import LayerCollection # noqa: E501 +from geoengine_openapi_client.models.layer_collection import LayerCollection class TestLayerCollection(unittest.TestCase): """LayerCollection unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerCollection: """Test LayerCollection - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerCollection` """ - model = LayerCollection() # noqa: E501 + model = LayerCollection() if include_optional: return LayerCollection( description = '', diff --git a/python/test/test_layer_collection_listing.py b/python/test/test_layer_collection_listing.py index f472b022..a44b30bb 100644 --- a/python/test/test_layer_collection_listing.py +++ b/python/test/test_layer_collection_listing.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer_collection_listing import LayerCollectionListing # noqa: E501 +from geoengine_openapi_client.models.layer_collection_listing import LayerCollectionListing class TestLayerCollectionListing(unittest.TestCase): """LayerCollectionListing unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerCollectionListing: """Test LayerCollectionListing - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerCollectionListing` """ - model = LayerCollectionListing() # noqa: E501 + model = LayerCollectionListing() if include_optional: return LayerCollectionListing( description = '', diff --git a/python/test/test_layer_collection_resource.py b/python/test/test_layer_collection_resource.py index d43b6567..57f66683 100644 --- a/python/test/test_layer_collection_resource.py +++ b/python/test/test_layer_collection_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from 
geoengine_openapi_client.models.layer_collection_resource import LayerCollectionResource # noqa: E501 +from geoengine_openapi_client.models.layer_collection_resource import LayerCollectionResource class TestLayerCollectionResource(unittest.TestCase): """LayerCollectionResource unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerCollectionResource: """Test LayerCollectionResource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerCollectionResource` """ - model = LayerCollectionResource() # noqa: E501 + model = LayerCollectionResource() if include_optional: return LayerCollectionResource( id = '', diff --git a/python/test/test_layer_listing.py b/python/test/test_layer_listing.py index ac1b5d87..82625cd4 100644 --- a/python/test/test_layer_listing.py +++ b/python/test/test_layer_listing.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer_listing import LayerListing # noqa: E501 +from geoengine_openapi_client.models.layer_listing import LayerListing class TestLayerListing(unittest.TestCase): """LayerListing unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerListing: """Test LayerListing - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerListing` """ - model = LayerListing() # noqa: E501 + model = LayerListing() if include_optional: return LayerListing( description = '', diff --git a/python/test/test_layer_resource.py b/python/test/test_layer_resource.py index baa77d4d..ddc12cca 100644 --- a/python/test/test_layer_resource.py +++ b/python/test/test_layer_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer_resource import LayerResource # noqa: E501 +from geoengine_openapi_client.models.layer_resource import LayerResource class TestLayerResource(unittest.TestCase): """LayerResource unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerResource: """Test LayerResource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerResource` """ - model = LayerResource() # noqa: E501 + model = LayerResource() if include_optional: return LayerResource( id = '', diff --git a/python/test/test_layer_update.py b/python/test/test_layer_update.py deleted file mode 100644 index 64c1d5c9..00000000 --- a/python/test/test_layer_update.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding: utf-8 - -""" - Geo Engine API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - - The version of the OpenAPI document: 0.8.0 - Contact: dev@geoengine.de - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. 
-""" # noqa: E501 - - -import unittest -import datetime - -from geoengine_openapi_client.models.layer_update import LayerUpdate # noqa: E501 - -class TestLayerUpdate(unittest.TestCase): - """LayerUpdate unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional) -> LayerUpdate: - """Test LayerUpdate - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # uncomment below to create an instance of `LayerUpdate` - """ - model = LayerUpdate() # noqa: E501 - if include_optional: - return LayerUpdate( - name = '', - symbology = None, - visibility = geoengine_openapi_client.models.layer_visibility.LayerVisibility( - data = True, - legend = True, ), - workflow = '' - ) - else: - return LayerUpdate( - name = '', - symbology = None, - visibility = geoengine_openapi_client.models.layer_visibility.LayerVisibility( - data = True, - legend = True, ), - workflow = '', - ) - """ - - def testLayerUpdate(self): - """Test LayerUpdate""" - # inst_req_only = self.make_instance(include_optional=False) - # inst_req_and_optional = self.make_instance(include_optional=True) - -if __name__ == '__main__': - unittest.main() diff --git a/python/test/test_layer_visibility.py b/python/test/test_layer_visibility.py index 1ad762c8..3aaef2dc 100644 --- a/python/test/test_layer_visibility.py +++ b/python/test/test_layer_visibility.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.layer_visibility import LayerVisibility # noqa: E501 +from geoengine_openapi_client.models.layer_visibility import LayerVisibility class TestLayerVisibility(unittest.TestCase): """LayerVisibility unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LayerVisibility: """Test LayerVisibility - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LayerVisibility` """ - model = LayerVisibility() # noqa: E501 + model = LayerVisibility() if include_optional: return LayerVisibility( data = True, diff --git a/python/test/test_layers_api.py b/python/test/test_layers_api.py index bdd3e6c1..56ed3cb4 100644 --- a/python/test/test_layers_api.py +++ b/python/test/test_layers_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.layers_api import LayersApi # noqa: E501 +from geoengine_openapi_client.api.layers_api import LayersApi class TestLayersApi(unittest.TestCase): """LayersApi unit test stubs""" def setUp(self) -> None: - self.api = LayersApi() # noqa: E501 + self.api = LayersApi() def tearDown(self) -> None: pass @@ -30,70 +30,70 @@ def tearDown(self) -> None: def test_add_collection(self) -> None: """Test case for add_collection - Add a new collection to an existing collection # noqa: E501 + Add a new collection to an existing collection """ pass def test_add_existing_collection_to_collection(self) -> None: """Test case for add_existing_collection_to_collection - Add an existing collection to a collection # noqa: E501 + Add an existing collection to a collection """ pass def test_add_existing_layer_to_collection(self) -> None: """Test case for add_existing_layer_to_collection - Add an existing layer to a collection # noqa: E501 + Add an existing layer to a collection """ pass def test_add_layer(self) -> None: 
"""Test case for add_layer - Add a new layer to a collection # noqa: E501 + Add a new layer to a collection """ pass def test_autocomplete_handler(self) -> None: """Test case for autocomplete_handler - Autocompletes the search on the contents of the collection of the given provider # noqa: E501 + Autocompletes the search on the contents of the collection of the given provider """ pass def test_layer_handler(self) -> None: """Test case for layer_handler - Retrieves the layer of the given provider # noqa: E501 + Retrieves the layer of the given provider """ pass def test_layer_to_dataset(self) -> None: """Test case for layer_to_dataset - Persist a raster layer from a provider as a dataset. # noqa: E501 + Persist a raster layer from a provider as a dataset. """ pass def test_layer_to_workflow_id_handler(self) -> None: """Test case for layer_to_workflow_id_handler - Registers a layer from a provider as a workflow and returns the workflow id # noqa: E501 + Registers a layer from a provider as a workflow and returns the workflow id """ pass def test_list_collection_handler(self) -> None: """Test case for list_collection_handler - List the contents of the collection of the given provider # noqa: E501 + List the contents of the collection of the given provider """ pass def test_list_root_collections_handler(self) -> None: """Test case for list_root_collections_handler - List all layer collections # noqa: E501 + List all layer collections """ pass @@ -106,49 +106,49 @@ def test_provider_capabilities_handler(self) -> None: def test_remove_collection(self) -> None: """Test case for remove_collection - Remove a collection # noqa: E501 + Remove a collection """ pass def test_remove_collection_from_collection(self) -> None: """Test case for remove_collection_from_collection - Delete a collection from a collection # noqa: E501 + Delete a collection from a collection """ pass def test_remove_layer(self) -> None: """Test case for remove_layer - Remove a collection # noqa: E501 + Remove a collection """ pass def test_remove_layer_from_collection(self) -> None: """Test case for remove_layer_from_collection - Remove a layer from a collection # noqa: E501 + Remove a layer from a collection """ pass def test_search_handler(self) -> None: """Test case for search_handler - Searches the contents of the collection of the given provider # noqa: E501 + Searches the contents of the collection of the given provider """ pass def test_update_collection(self) -> None: """Test case for update_collection - Update a collection # noqa: E501 + Update a collection """ pass def test_update_layer(self) -> None: """Test case for update_layer - Update a layer # noqa: E501 + Update a layer """ pass diff --git a/python/test/test_line_symbology.py b/python/test/test_line_symbology.py index d1f44793..d1a0a826 100644 --- a/python/test/test_line_symbology.py +++ b/python/test/test_line_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.line_symbology import LineSymbology # noqa: E501 +from geoengine_openapi_client.models.line_symbology import LineSymbology class TestLineSymbology(unittest.TestCase): """LineSymbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LineSymbology: """Test LineSymbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance 
of `LineSymbology` """ - model = LineSymbology() # noqa: E501 + model = LineSymbology() if include_optional: return LineSymbology( auto_simplified = True, diff --git a/python/test/test_linear_gradient.py b/python/test/test_linear_gradient.py index 1e5b8a1d..284fc245 100644 --- a/python/test/test_linear_gradient.py +++ b/python/test/test_linear_gradient.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.linear_gradient import LinearGradient # noqa: E501 +from geoengine_openapi_client.models.linear_gradient import LinearGradient class TestLinearGradient(unittest.TestCase): """LinearGradient unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LinearGradient: """Test LinearGradient - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LinearGradient` """ - model = LinearGradient() # noqa: E501 + model = LinearGradient() if include_optional: return LinearGradient( breakpoints = [ diff --git a/python/test/test_logarithmic_gradient.py b/python/test/test_logarithmic_gradient.py index 71b9c63b..27d9a17a 100644 --- a/python/test/test_logarithmic_gradient.py +++ b/python/test/test_logarithmic_gradient.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.logarithmic_gradient import LogarithmicGradient # noqa: E501 +from geoengine_openapi_client.models.logarithmic_gradient import LogarithmicGradient class TestLogarithmicGradient(unittest.TestCase): """LogarithmicGradient unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> LogarithmicGradient: """Test LogarithmicGradient - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `LogarithmicGradient` """ - model = LogarithmicGradient() # noqa: E501 + model = LogarithmicGradient() if include_optional: return LogarithmicGradient( breakpoints = [ diff --git a/python/test/test_measurement.py b/python/test/test_measurement.py index a9162b97..c2f2b36a 100644 --- a/python/test/test_measurement.py +++ b/python/test/test_measurement.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.measurement import Measurement # noqa: E501 +from geoengine_openapi_client.models.measurement import Measurement class TestMeasurement(unittest.TestCase): """Measurement unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Measurement: """Test Measurement - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Measurement` """ - model = Measurement() # noqa: E501 + model = Measurement() if include_optional: return Measurement( type = 'unitless', diff --git a/python/test/test_meta_data_definition.py b/python/test/test_meta_data_definition.py index fa96ca0a..72f11439 100644 --- a/python/test/test_meta_data_definition.py +++ b/python/test/test_meta_data_definition.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.meta_data_definition 
import MetaDataDefinition # noqa: E501 +from geoengine_openapi_client.models.meta_data_definition import MetaDataDefinition class TestMetaDataDefinition(unittest.TestCase): """MetaDataDefinition unit test stubs""" @@ -29,19 +28,40 @@ def tearDown(self): def make_instance(self, include_optional) -> MetaDataDefinition: """Test MetaDataDefinition - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MetaDataDefinition` """ - model = MetaDataDefinition() # noqa: E501 + model = MetaDataDefinition() if include_optional: return MetaDataDefinition( loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( attribute_query = '', cache_ttl = 0, - columns = null, - data_type = null, + columns = geoengine_openapi_client.models.ogr_source_column_spec.OgrSourceColumnSpec( + bool = [ + '' + ], + datetime = [ + '' + ], + float = [ + '' + ], + format_specifics = null, + int = [ + '' + ], + rename = { + 'key' : '' + }, + text = [ + '' + ], + x = '', + y = '', ), + data_type = 'Data', default_geometry = null, file_name = '', force_ogr_spatial_filter = True, @@ -56,12 +76,22 @@ def make_instance(self, include_optional) -> MetaDataDefinition: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), - type = 'MockMetaData', + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), + type = 'OgrMetaData', cache_ttl = 0, data_time = geoengine_openapi_client.models.time_interval.TimeInterval( end = 56, @@ -93,8 +123,29 @@ def make_instance(self, include_optional) -> MetaDataDefinition: loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( attribute_query = '', cache_ttl = 0, - columns = null, - data_type = null, + columns = geoengine_openapi_client.models.ogr_source_column_spec.OgrSourceColumnSpec( + bool = [ + '' + ], + datetime = [ + '' + ], + float = [ + '' + ], + format_specifics = null, + int = [ + '' + ], + rename = { + 'key' : '' + }, + text = [ + '' + ], + x = '', + y = '', ), + data_type = 'Data', default_geometry = null, file_name = '', force_ogr_spatial_filter = True, @@ -109,12 +160,22 @@ def make_instance(self, include_optional) -> MetaDataDefinition: measurement = null, name = '', ) ], - bbox = null, + bbox = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( + lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), data_type = 'U8', - resolution = null, + resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( + x = 1.337, + y = 1.337, ), spatial_reference = '', - time = null, ), - type = 'MockMetaData', + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), + 
type = 'OgrMetaData', data_time = geoengine_openapi_client.models.time_interval.TimeInterval( end = 56, start = 56, ), diff --git a/python/test/test_meta_data_suggestion.py b/python/test/test_meta_data_suggestion.py index b0069977..2e7dce2d 100644 --- a/python/test/test_meta_data_suggestion.py +++ b/python/test/test_meta_data_suggestion.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.meta_data_suggestion import MetaDataSuggestion # noqa: E501 +from geoengine_openapi_client.models.meta_data_suggestion import MetaDataSuggestion class TestMetaDataSuggestion(unittest.TestCase): """MetaDataSuggestion unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MetaDataSuggestion: """Test MetaDataSuggestion - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MetaDataSuggestion` """ - model = MetaDataSuggestion() # noqa: E501 + model = MetaDataSuggestion() if include_optional: return MetaDataSuggestion( layer_name = '', diff --git a/python/test/test_ml_api.py b/python/test/test_ml_api.py index b8428ae7..d1c722e8 100644 --- a/python/test/test_ml_api.py +++ b/python/test/test_ml_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.ml_api import MLApi # noqa: E501 +from geoengine_openapi_client.api.ml_api import MLApi class TestMLApi(unittest.TestCase): """MLApi unit test stubs""" def setUp(self) -> None: - self.api = MLApi() # noqa: E501 + self.api = MLApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_add_ml_model(self) -> None: """Test case for add_ml_model - Create a new ml model. # noqa: E501 + Create a new ml model. """ pass def test_get_ml_model(self) -> None: """Test case for get_ml_model - Get ml model by name. # noqa: E501 + Get ml model by name. """ pass def test_list_ml_models(self) -> None: """Test case for list_ml_models - List ml models. # noqa: E501 + List ml models. 
""" pass diff --git a/python/test/test_ml_model.py b/python/test/test_ml_model.py index d93cca15..aadc36d3 100644 --- a/python/test/test_ml_model.py +++ b/python/test/test_ml_model.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ml_model import MlModel # noqa: E501 +from geoengine_openapi_client.models.ml_model import MlModel class TestMlModel(unittest.TestCase): """MlModel unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MlModel: """Test MlModel - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MlModel` """ - model = MlModel() # noqa: E501 + model = MlModel() if include_optional: return MlModel( description = '', diff --git a/python/test/test_ml_model_metadata.py b/python/test/test_ml_model_metadata.py index f118df2b..492e7f20 100644 --- a/python/test/test_ml_model_metadata.py +++ b/python/test/test_ml_model_metadata.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ml_model_metadata import MlModelMetadata # noqa: E501 +from geoengine_openapi_client.models.ml_model_metadata import MlModelMetadata class TestMlModelMetadata(unittest.TestCase): """MlModelMetadata unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MlModelMetadata: """Test MlModelMetadata - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MlModelMetadata` """ - model = MlModelMetadata() # noqa: E501 + model = MlModelMetadata() if include_optional: return MlModelMetadata( file_name = '', diff --git a/python/test/test_ml_model_name_response.py b/python/test/test_ml_model_name_response.py index 967aa611..9876ede1 100644 --- a/python/test/test_ml_model_name_response.py +++ b/python/test/test_ml_model_name_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ml_model_name_response import MlModelNameResponse # noqa: E501 +from geoengine_openapi_client.models.ml_model_name_response import MlModelNameResponse class TestMlModelNameResponse(unittest.TestCase): """MlModelNameResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MlModelNameResponse: """Test MlModelNameResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MlModelNameResponse` """ - model = MlModelNameResponse() # noqa: E501 + model = MlModelNameResponse() if include_optional: return MlModelNameResponse( ml_model_name = '' diff --git a/python/test/test_ml_model_resource.py b/python/test/test_ml_model_resource.py index 6ff90d5c..9eb71960 100644 --- a/python/test/test_ml_model_resource.py +++ b/python/test/test_ml_model_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ml_model_resource import MlModelResource # noqa: E501 +from geoengine_openapi_client.models.ml_model_resource import MlModelResource class TestMlModelResource(unittest.TestCase): """MlModelResource unit 
test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MlModelResource: """Test MlModelResource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MlModelResource` """ - model = MlModelResource() # noqa: E501 + model = MlModelResource() if include_optional: return MlModelResource( id = '', diff --git a/python/test/test_mock_dataset_data_source_loading_info.py b/python/test/test_mock_dataset_data_source_loading_info.py index 6482c2c6..9b432667 100644 --- a/python/test/test_mock_dataset_data_source_loading_info.py +++ b/python/test/test_mock_dataset_data_source_loading_info.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.mock_dataset_data_source_loading_info import MockDatasetDataSourceLoadingInfo # noqa: E501 +from geoengine_openapi_client.models.mock_dataset_data_source_loading_info import MockDatasetDataSourceLoadingInfo class TestMockDatasetDataSourceLoadingInfo(unittest.TestCase): """MockDatasetDataSourceLoadingInfo unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MockDatasetDataSourceLoadingInfo: """Test MockDatasetDataSourceLoadingInfo - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MockDatasetDataSourceLoadingInfo` """ - model = MockDatasetDataSourceLoadingInfo() # noqa: E501 + model = MockDatasetDataSourceLoadingInfo() if include_optional: return MockDatasetDataSourceLoadingInfo( points = [ diff --git a/python/test/test_mock_meta_data.py b/python/test/test_mock_meta_data.py deleted file mode 100644 index 13a61c98..00000000 --- a/python/test/test_mock_meta_data.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding: utf-8 - -""" - Geo Engine API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - - The version of the OpenAPI document: 0.8.0 - Contact: dev@geoengine.de - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. 
-""" # noqa: E501 - - -import unittest -import datetime - -from geoengine_openapi_client.models.mock_meta_data import MockMetaData # noqa: E501 - -class TestMockMetaData(unittest.TestCase): - """MockMetaData unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional) -> MockMetaData: - """Test MockMetaData - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # uncomment below to create an instance of `MockMetaData` - """ - model = MockMetaData() # noqa: E501 - if include_optional: - return MockMetaData( - loading_info = geoengine_openapi_client.models.mock_dataset_data_source_loading_info.MockDatasetDataSourceLoadingInfo( - points = [ - geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ) - ], ), - result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( - bbox = null, - columns = { - 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( - data_type = 'category', - measurement = null, ) - }, - data_type = 'Data', - spatial_reference = '', - time = null, ), - type = 'MockMetaData' - ) - else: - return MockMetaData( - loading_info = geoengine_openapi_client.models.mock_dataset_data_source_loading_info.MockDatasetDataSourceLoadingInfo( - points = [ - geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ) - ], ), - result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( - bbox = null, - columns = { - 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( - data_type = 'category', - measurement = null, ) - }, - data_type = 'Data', - spatial_reference = '', - time = null, ), - type = 'MockMetaData', - ) - """ - - def testMockMetaData(self): - """Test MockMetaData""" - # inst_req_only = self.make_instance(include_optional=False) - # inst_req_and_optional = self.make_instance(include_optional=True) - -if __name__ == '__main__': - unittest.main() diff --git a/python/test/test_multi_band_raster_colorizer.py b/python/test/test_multi_band_raster_colorizer.py index 5d589978..0e44a6cd 100644 --- a/python/test/test_multi_band_raster_colorizer.py +++ b/python/test/test_multi_band_raster_colorizer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.multi_band_raster_colorizer import MultiBandRasterColorizer # noqa: E501 +from geoengine_openapi_client.models.multi_band_raster_colorizer import MultiBandRasterColorizer class TestMultiBandRasterColorizer(unittest.TestCase): """MultiBandRasterColorizer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MultiBandRasterColorizer: """Test MultiBandRasterColorizer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MultiBandRasterColorizer` """ - model = MultiBandRasterColorizer() # noqa: E501 + model = MultiBandRasterColorizer() if include_optional: return MultiBandRasterColorizer( blue_band = 0, diff --git a/python/test/test_multi_line_string.py b/python/test/test_multi_line_string.py index 1534c868..f0561181 100644 --- a/python/test/test_multi_line_string.py +++ b/python/test/test_multi_line_string.py @@ -14,9 +14,8 @@ import unittest 
-import datetime -from geoengine_openapi_client.models.multi_line_string import MultiLineString # noqa: E501 +from geoengine_openapi_client.models.multi_line_string import MultiLineString class TestMultiLineString(unittest.TestCase): """MultiLineString unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MultiLineString: """Test MultiLineString - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MultiLineString` """ - model = MultiLineString() # noqa: E501 + model = MultiLineString() if include_optional: return MultiLineString( coordinates = [ diff --git a/python/test/test_multi_point.py b/python/test/test_multi_point.py index cc6d7aae..a78f2c51 100644 --- a/python/test/test_multi_point.py +++ b/python/test/test_multi_point.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.multi_point import MultiPoint # noqa: E501 +from geoengine_openapi_client.models.multi_point import MultiPoint class TestMultiPoint(unittest.TestCase): """MultiPoint unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MultiPoint: """Test MultiPoint - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MultiPoint` """ - model = MultiPoint() # noqa: E501 + model = MultiPoint() if include_optional: return MultiPoint( coordinates = [ diff --git a/python/test/test_multi_polygon.py b/python/test/test_multi_polygon.py index 93bc5481..c21f0b62 100644 --- a/python/test/test_multi_polygon.py +++ b/python/test/test_multi_polygon.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.multi_polygon import MultiPolygon # noqa: E501 +from geoengine_openapi_client.models.multi_polygon import MultiPolygon class TestMultiPolygon(unittest.TestCase): """MultiPolygon unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> MultiPolygon: """Test MultiPolygon - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `MultiPolygon` """ - model = MultiPolygon() # noqa: E501 + model = MultiPolygon() if include_optional: return MultiPolygon( polygons = [ diff --git a/python/test/test_number_param.py b/python/test/test_number_param.py index 84132201..bdb3d225 100644 --- a/python/test/test_number_param.py +++ b/python/test/test_number_param.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.number_param import NumberParam # noqa: E501 +from geoengine_openapi_client.models.number_param import NumberParam class TestNumberParam(unittest.TestCase): """NumberParam unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> NumberParam: """Test NumberParam - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of 
`NumberParam` """ - model = NumberParam() # noqa: E501 + model = NumberParam() if include_optional: return NumberParam( type = 'static', diff --git a/python/test/test_ogcwcs_api.py b/python/test/test_ogcwcs_api.py index 6c2b5373..42e178ea 100644 --- a/python/test/test_ogcwcs_api.py +++ b/python/test/test_ogcwcs_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.ogcwcs_api import OGCWCSApi # noqa: E501 +from geoengine_openapi_client.api.ogcwcs_api import OGCWCSApi class TestOGCWCSApi(unittest.TestCase): """OGCWCSApi unit test stubs""" def setUp(self) -> None: - self.api = OGCWCSApi() # noqa: E501 + self.api = OGCWCSApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_wcs_capabilities_handler(self) -> None: """Test case for wcs_capabilities_handler - Get WCS Capabilities # noqa: E501 + Get WCS Capabilities """ pass def test_wcs_describe_coverage_handler(self) -> None: """Test case for wcs_describe_coverage_handler - Get WCS Coverage Description # noqa: E501 + Get WCS Coverage Description """ pass def test_wcs_get_coverage_handler(self) -> None: """Test case for wcs_get_coverage_handler - Get WCS Coverage # noqa: E501 + Get WCS Coverage """ pass diff --git a/python/test/test_ogcwfs_api.py b/python/test/test_ogcwfs_api.py index f72c5dd8..49ecfc54 100644 --- a/python/test/test_ogcwfs_api.py +++ b/python/test/test_ogcwfs_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.ogcwfs_api import OGCWFSApi # noqa: E501 +from geoengine_openapi_client.api.ogcwfs_api import OGCWFSApi class TestOGCWFSApi(unittest.TestCase): """OGCWFSApi unit test stubs""" def setUp(self) -> None: - self.api = OGCWFSApi() # noqa: E501 + self.api = OGCWFSApi() def tearDown(self) -> None: pass @@ -30,14 +30,14 @@ def tearDown(self) -> None: def test_wfs_capabilities_handler(self) -> None: """Test case for wfs_capabilities_handler - Get WFS Capabilities # noqa: E501 + Get WFS Capabilities """ pass def test_wfs_feature_handler(self) -> None: """Test case for wfs_feature_handler - Get WCS Features # noqa: E501 + Get WCS Features """ pass diff --git a/python/test/test_ogcwms_api.py b/python/test/test_ogcwms_api.py index 02f8d606..787b92ba 100644 --- a/python/test/test_ogcwms_api.py +++ b/python/test/test_ogcwms_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.ogcwms_api import OGCWMSApi # noqa: E501 +from geoengine_openapi_client.api.ogcwms_api import OGCWMSApi class TestOGCWMSApi(unittest.TestCase): """OGCWMSApi unit test stubs""" def setUp(self) -> None: - self.api = OGCWMSApi() # noqa: E501 + self.api = OGCWMSApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_wms_capabilities_handler(self) -> None: """Test case for wms_capabilities_handler - Get WMS Capabilities # noqa: E501 + Get WMS Capabilities """ pass def test_wms_legend_graphic_handler(self) -> None: """Test case for wms_legend_graphic_handler - Get WMS Legend Graphic # noqa: E501 + Get WMS Legend Graphic """ pass def test_wms_map_handler(self) -> None: """Test case for wms_map_handler - Get WMS Map # noqa: E501 + Get WMS Map """ pass diff --git a/python/test/test_ogr_meta_data.py b/python/test/test_ogr_meta_data.py deleted file mode 100644 index 49f6c33a..00000000 --- a/python/test/test_ogr_meta_data.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding: utf-8 - -""" - Geo Engine API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - - The version of 
the OpenAPI document: 0.8.0 - Contact: dev@geoengine.de - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -import unittest -import datetime - -from geoengine_openapi_client.models.ogr_meta_data import OgrMetaData # noqa: E501 - -class TestOgrMetaData(unittest.TestCase): - """OgrMetaData unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional) -> OgrMetaData: - """Test OgrMetaData - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # uncomment below to create an instance of `OgrMetaData` - """ - model = OgrMetaData() # noqa: E501 - if include_optional: - return OgrMetaData( - loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( - attribute_query = '', - cache_ttl = 0, - columns = null, - data_type = null, - default_geometry = null, - file_name = '', - force_ogr_spatial_filter = True, - force_ogr_time_filter = True, - layer_name = '', - on_error = 'ignore', - sql_query = '', - time = null, ), - result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( - bbox = null, - columns = { - 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( - data_type = 'category', - measurement = null, ) - }, - data_type = 'Data', - spatial_reference = '', - time = null, ), - type = 'OgrMetaData' - ) - else: - return OgrMetaData( - loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( - attribute_query = '', - cache_ttl = 0, - columns = null, - data_type = null, - default_geometry = null, - file_name = '', - force_ogr_spatial_filter = True, - force_ogr_time_filter = True, - layer_name = '', - on_error = 'ignore', - sql_query = '', - time = null, ), - result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( - bbox = null, - columns = { - 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( - data_type = 'category', - measurement = null, ) - }, - data_type = 'Data', - spatial_reference = '', - time = null, ), - type = 'OgrMetaData', - ) - """ - - def testOgrMetaData(self): - """Test OgrMetaData""" - # inst_req_only = self.make_instance(include_optional=False) - # inst_req_and_optional = self.make_instance(include_optional=True) - -if __name__ == '__main__': - unittest.main() diff --git a/python/test/test_ogr_source_column_spec.py b/python/test/test_ogr_source_column_spec.py index 81e164eb..b9f0f1b7 100644 --- a/python/test/test_ogr_source_column_spec.py +++ b/python/test/test_ogr_source_column_spec.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_column_spec import OgrSourceColumnSpec # noqa: E501 +from geoengine_openapi_client.models.ogr_source_column_spec import OgrSourceColumnSpec class TestOgrSourceColumnSpec(unittest.TestCase): """OgrSourceColumnSpec unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceColumnSpec: """Test OgrSourceColumnSpec - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceColumnSpec` """ - model = OgrSourceColumnSpec() # noqa: E501 + model = OgrSourceColumnSpec() 
if include_optional: return OgrSourceColumnSpec( bool = [ @@ -43,7 +42,7 @@ def make_instance(self, include_optional) -> OgrSourceColumnSpec: datetime = [ '' ], - float = [ + var_float = [ '' ], format_specifics = None, diff --git a/python/test/test_ogr_source_dataset.py b/python/test/test_ogr_source_dataset.py index f4c66e98..4eefdcb5 100644 --- a/python/test/test_ogr_source_dataset.py +++ b/python/test/test_ogr_source_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset import OgrSourceDataset # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset import OgrSourceDataset class TestOgrSourceDataset(unittest.TestCase): """OgrSourceDataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDataset: """Test OgrSourceDataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDataset` """ - model = OgrSourceDataset() # noqa: E501 + model = OgrSourceDataset() if include_optional: return OgrSourceDataset( attribute_query = '', diff --git a/python/test/test_ogr_source_dataset_time_type.py b/python/test/test_ogr_source_dataset_time_type.py index 148c91e8..d1039b08 100644 --- a/python/test/test_ogr_source_dataset_time_type.py +++ b/python/test/test_ogr_source_dataset_time_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset_time_type import OgrSourceDatasetTimeType # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset_time_type import OgrSourceDatasetTimeType class TestOgrSourceDatasetTimeType(unittest.TestCase): """OgrSourceDatasetTimeType unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDatasetTimeType: """Test OgrSourceDatasetTimeType - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDatasetTimeType` """ - model = OgrSourceDatasetTimeType() # noqa: E501 + model = OgrSourceDatasetTimeType() if include_optional: return OgrSourceDatasetTimeType( type = 'none', diff --git a/python/test/test_ogr_source_dataset_time_type_none.py b/python/test/test_ogr_source_dataset_time_type_none.py index 1ee8db63..34c5a92c 100644 --- a/python/test/test_ogr_source_dataset_time_type_none.py +++ b/python/test/test_ogr_source_dataset_time_type_none.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset_time_type_none import OgrSourceDatasetTimeTypeNone # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset_time_type_none import OgrSourceDatasetTimeTypeNone class TestOgrSourceDatasetTimeTypeNone(unittest.TestCase): """OgrSourceDatasetTimeTypeNone unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDatasetTimeTypeNone: """Test OgrSourceDatasetTimeTypeNone - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDatasetTimeTypeNone` 
""" - model = OgrSourceDatasetTimeTypeNone() # noqa: E501 + model = OgrSourceDatasetTimeTypeNone() if include_optional: return OgrSourceDatasetTimeTypeNone( type = 'none' diff --git a/python/test/test_ogr_source_dataset_time_type_start.py b/python/test/test_ogr_source_dataset_time_type_start.py index 9aa84cfa..06605fb7 100644 --- a/python/test/test_ogr_source_dataset_time_type_start.py +++ b/python/test/test_ogr_source_dataset_time_type_start.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset_time_type_start import OgrSourceDatasetTimeTypeStart # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset_time_type_start import OgrSourceDatasetTimeTypeStart class TestOgrSourceDatasetTimeTypeStart(unittest.TestCase): """OgrSourceDatasetTimeTypeStart unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDatasetTimeTypeStart: """Test OgrSourceDatasetTimeTypeStart - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDatasetTimeTypeStart` """ - model = OgrSourceDatasetTimeTypeStart() # noqa: E501 + model = OgrSourceDatasetTimeTypeStart() if include_optional: return OgrSourceDatasetTimeTypeStart( duration = None, diff --git a/python/test/test_ogr_source_dataset_time_type_start_duration.py b/python/test/test_ogr_source_dataset_time_type_start_duration.py index 8234fd22..dad7ac48 100644 --- a/python/test/test_ogr_source_dataset_time_type_start_duration.py +++ b/python/test/test_ogr_source_dataset_time_type_start_duration.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_duration import OgrSourceDatasetTimeTypeStartDuration # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_duration import OgrSourceDatasetTimeTypeStartDuration class TestOgrSourceDatasetTimeTypeStartDuration(unittest.TestCase): """OgrSourceDatasetTimeTypeStartDuration unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDatasetTimeTypeStartDuration: """Test OgrSourceDatasetTimeTypeStartDuration - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDatasetTimeTypeStartDuration` """ - model = OgrSourceDatasetTimeTypeStartDuration() # noqa: E501 + model = OgrSourceDatasetTimeTypeStartDuration() if include_optional: return OgrSourceDatasetTimeTypeStartDuration( duration_field = '', diff --git a/python/test/test_ogr_source_dataset_time_type_start_end.py b/python/test/test_ogr_source_dataset_time_type_start_end.py index 1176dffe..53af1094 100644 --- a/python/test/test_ogr_source_dataset_time_type_start_end.py +++ b/python/test/test_ogr_source_dataset_time_type_start_end.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_end import OgrSourceDatasetTimeTypeStartEnd # noqa: E501 +from geoengine_openapi_client.models.ogr_source_dataset_time_type_start_end import OgrSourceDatasetTimeTypeStartEnd class TestOgrSourceDatasetTimeTypeStartEnd(unittest.TestCase): """OgrSourceDatasetTimeTypeStartEnd 
unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDatasetTimeTypeStartEnd: """Test OgrSourceDatasetTimeTypeStartEnd - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDatasetTimeTypeStartEnd` """ - model = OgrSourceDatasetTimeTypeStartEnd() # noqa: E501 + model = OgrSourceDatasetTimeTypeStartEnd() if include_optional: return OgrSourceDatasetTimeTypeStartEnd( end_field = '', diff --git a/python/test/test_ogr_source_duration_spec.py b/python/test/test_ogr_source_duration_spec.py index 800a2112..575e691d 100644 --- a/python/test/test_ogr_source_duration_spec.py +++ b/python/test/test_ogr_source_duration_spec.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_duration_spec import OgrSourceDurationSpec # noqa: E501 +from geoengine_openapi_client.models.ogr_source_duration_spec import OgrSourceDurationSpec class TestOgrSourceDurationSpec(unittest.TestCase): """OgrSourceDurationSpec unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDurationSpec: """Test OgrSourceDurationSpec - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDurationSpec` """ - model = OgrSourceDurationSpec() # noqa: E501 + model = OgrSourceDurationSpec() if include_optional: return OgrSourceDurationSpec( type = 'infinite', diff --git a/python/test/test_ogr_source_duration_spec_infinite.py b/python/test/test_ogr_source_duration_spec_infinite.py index 53424f69..803c5c57 100644 --- a/python/test/test_ogr_source_duration_spec_infinite.py +++ b/python/test/test_ogr_source_duration_spec_infinite.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_duration_spec_infinite import OgrSourceDurationSpecInfinite # noqa: E501 +from geoengine_openapi_client.models.ogr_source_duration_spec_infinite import OgrSourceDurationSpecInfinite class TestOgrSourceDurationSpecInfinite(unittest.TestCase): """OgrSourceDurationSpecInfinite unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDurationSpecInfinite: """Test OgrSourceDurationSpecInfinite - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDurationSpecInfinite` """ - model = OgrSourceDurationSpecInfinite() # noqa: E501 + model = OgrSourceDurationSpecInfinite() if include_optional: return OgrSourceDurationSpecInfinite( type = 'infinite' diff --git a/python/test/test_ogr_source_duration_spec_value.py b/python/test/test_ogr_source_duration_spec_value.py index d91d7b57..eafd13b3 100644 --- a/python/test/test_ogr_source_duration_spec_value.py +++ b/python/test/test_ogr_source_duration_spec_value.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_duration_spec_value import OgrSourceDurationSpecValue # noqa: E501 +from geoengine_openapi_client.models.ogr_source_duration_spec_value import 
OgrSourceDurationSpecValue class TestOgrSourceDurationSpecValue(unittest.TestCase): """OgrSourceDurationSpecValue unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDurationSpecValue: """Test OgrSourceDurationSpecValue - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDurationSpecValue` """ - model = OgrSourceDurationSpecValue() # noqa: E501 + model = OgrSourceDurationSpecValue() if include_optional: return OgrSourceDurationSpecValue( granularity = 'millis', diff --git a/python/test/test_ogr_source_duration_spec_zero.py b/python/test/test_ogr_source_duration_spec_zero.py index 750bb6de..55ca109d 100644 --- a/python/test/test_ogr_source_duration_spec_zero.py +++ b/python/test/test_ogr_source_duration_spec_zero.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_duration_spec_zero import OgrSourceDurationSpecZero # noqa: E501 +from geoengine_openapi_client.models.ogr_source_duration_spec_zero import OgrSourceDurationSpecZero class TestOgrSourceDurationSpecZero(unittest.TestCase): """OgrSourceDurationSpecZero unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceDurationSpecZero: """Test OgrSourceDurationSpecZero - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceDurationSpecZero` """ - model = OgrSourceDurationSpecZero() # noqa: E501 + model = OgrSourceDurationSpecZero() if include_optional: return OgrSourceDurationSpecZero( type = 'zero' diff --git a/python/test/test_ogr_source_error_spec.py b/python/test/test_ogr_source_error_spec.py index 936db630..b4850f63 100644 --- a/python/test/test_ogr_source_error_spec.py +++ b/python/test/test_ogr_source_error_spec.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_error_spec import OgrSourceErrorSpec # noqa: E501 +from geoengine_openapi_client.models.ogr_source_error_spec import OgrSourceErrorSpec class TestOgrSourceErrorSpec(unittest.TestCase): """OgrSourceErrorSpec unit test stubs""" diff --git a/python/test/test_ogr_source_time_format.py b/python/test/test_ogr_source_time_format.py index 4494d4fb..3d7e635b 100644 --- a/python/test/test_ogr_source_time_format.py +++ b/python/test/test_ogr_source_time_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat # noqa: E501 +from geoengine_openapi_client.models.ogr_source_time_format import OgrSourceTimeFormat class TestOgrSourceTimeFormat(unittest.TestCase): """OgrSourceTimeFormat unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OgrSourceTimeFormat: """Test OgrSourceTimeFormat - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OgrSourceTimeFormat` """ - model = OgrSourceTimeFormat() # noqa: E501 + model = OgrSourceTimeFormat() if include_optional: return 
OgrSourceTimeFormat( custom_format = '', diff --git a/python/test/test_ogr_source_time_format_custom.py b/python/test/test_ogr_source_time_format_one_of.py similarity index 64% rename from python/test/test_ogr_source_time_format_custom.py rename to python/test/test_ogr_source_time_format_one_of.py index b7620bb7..790069c0 100644 --- a/python/test/test_ogr_source_time_format_custom.py +++ b/python/test/test_ogr_source_time_format_one_of.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_time_format_custom import OgrSourceTimeFormatCustom # noqa: E501 +from geoengine_openapi_client.models.ogr_source_time_format_one_of import OgrSourceTimeFormatOneOf -class TestOgrSourceTimeFormatCustom(unittest.TestCase): - """OgrSourceTimeFormatCustom unit test stubs""" +class TestOgrSourceTimeFormatOneOf(unittest.TestCase): + """OgrSourceTimeFormatOneOf unit test stubs""" def setUp(self): pass @@ -27,28 +26,28 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> OgrSourceTimeFormatCustom: - """Test OgrSourceTimeFormatCustom - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> OgrSourceTimeFormatOneOf: + """Test OgrSourceTimeFormatOneOf + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `OgrSourceTimeFormatCustom` + # uncomment below to create an instance of `OgrSourceTimeFormatOneOf` """ - model = OgrSourceTimeFormatCustom() # noqa: E501 + model = OgrSourceTimeFormatOneOf() if include_optional: - return OgrSourceTimeFormatCustom( + return OgrSourceTimeFormatOneOf( custom_format = '', format = 'custom' ) else: - return OgrSourceTimeFormatCustom( + return OgrSourceTimeFormatOneOf( custom_format = '', format = 'custom', ) """ - def testOgrSourceTimeFormatCustom(self): - """Test OgrSourceTimeFormatCustom""" + def testOgrSourceTimeFormatOneOf(self): + """Test OgrSourceTimeFormatOneOf""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_ogr_source_time_format_unix_time_stamp.py b/python/test/test_ogr_source_time_format_one_of1.py similarity index 62% rename from python/test/test_ogr_source_time_format_unix_time_stamp.py rename to python/test/test_ogr_source_time_format_one_of1.py index 6796fd77..f215ef3f 100644 --- a/python/test/test_ogr_source_time_format_unix_time_stamp.py +++ b/python/test/test_ogr_source_time_format_one_of1.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_time_format_unix_time_stamp import OgrSourceTimeFormatUnixTimeStamp # noqa: E501 +from geoengine_openapi_client.models.ogr_source_time_format_one_of1 import OgrSourceTimeFormatOneOf1 -class TestOgrSourceTimeFormatUnixTimeStamp(unittest.TestCase): - """OgrSourceTimeFormatUnixTimeStamp unit test stubs""" +class TestOgrSourceTimeFormatOneOf1(unittest.TestCase): + """OgrSourceTimeFormatOneOf1 unit test stubs""" def setUp(self): pass @@ -27,28 +26,28 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> OgrSourceTimeFormatUnixTimeStamp: - """Test OgrSourceTimeFormatUnixTimeStamp - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> OgrSourceTimeFormatOneOf1: + """Test OgrSourceTimeFormatOneOf1 + include_optional is a boolean, 
when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `OgrSourceTimeFormatUnixTimeStamp` + # uncomment below to create an instance of `OgrSourceTimeFormatOneOf1` """ - model = OgrSourceTimeFormatUnixTimeStamp() # noqa: E501 + model = OgrSourceTimeFormatOneOf1() if include_optional: - return OgrSourceTimeFormatUnixTimeStamp( + return OgrSourceTimeFormatOneOf1( format = 'unixTimeStamp', timestamp_type = 'epochSeconds' ) else: - return OgrSourceTimeFormatUnixTimeStamp( + return OgrSourceTimeFormatOneOf1( format = 'unixTimeStamp', timestamp_type = 'epochSeconds', ) """ - def testOgrSourceTimeFormatUnixTimeStamp(self): - """Test OgrSourceTimeFormatUnixTimeStamp""" + def testOgrSourceTimeFormatOneOf1(self): + """Test OgrSourceTimeFormatOneOf1""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_ogr_source_time_format_auto.py b/python/test/test_ogr_source_time_format_one_of2.py similarity index 63% rename from python/test/test_ogr_source_time_format_auto.py rename to python/test/test_ogr_source_time_format_one_of2.py index b99a1355..de009f10 100644 --- a/python/test/test_ogr_source_time_format_auto.py +++ b/python/test/test_ogr_source_time_format_one_of2.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.ogr_source_time_format_auto import OgrSourceTimeFormatAuto # noqa: E501 +from geoengine_openapi_client.models.ogr_source_time_format_one_of2 import OgrSourceTimeFormatOneOf2 -class TestOgrSourceTimeFormatAuto(unittest.TestCase): - """OgrSourceTimeFormatAuto unit test stubs""" +class TestOgrSourceTimeFormatOneOf2(unittest.TestCase): + """OgrSourceTimeFormatOneOf2 unit test stubs""" def setUp(self): pass @@ -27,26 +26,26 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> OgrSourceTimeFormatAuto: - """Test OgrSourceTimeFormatAuto - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> OgrSourceTimeFormatOneOf2: + """Test OgrSourceTimeFormatOneOf2 + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `OgrSourceTimeFormatAuto` + # uncomment below to create an instance of `OgrSourceTimeFormatOneOf2` """ - model = OgrSourceTimeFormatAuto() # noqa: E501 + model = OgrSourceTimeFormatOneOf2() if include_optional: - return OgrSourceTimeFormatAuto( + return OgrSourceTimeFormatOneOf2( format = 'auto' ) else: - return OgrSourceTimeFormatAuto( + return OgrSourceTimeFormatOneOf2( format = 'auto', ) """ - def testOgrSourceTimeFormatAuto(self): - """Test OgrSourceTimeFormatAuto""" + def testOgrSourceTimeFormatOneOf2(self): + """Test OgrSourceTimeFormatOneOf2""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_operator_quota.py b/python/test/test_operator_quota.py index 6cbf2de9..b73e74c9 100644 --- a/python/test/test_operator_quota.py +++ b/python/test/test_operator_quota.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.operator_quota import OperatorQuota # noqa: E501 +from geoengine_openapi_client.models.operator_quota import OperatorQuota class TestOperatorQuota(unittest.TestCase): """OperatorQuota 
unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> OperatorQuota: """Test OperatorQuota - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `OperatorQuota` """ - model = OperatorQuota() # noqa: E501 + model = OperatorQuota() if include_optional: return OperatorQuota( count = 0, diff --git a/python/test/test_order_by.py b/python/test/test_order_by.py index 692f668a..79dd079d 100644 --- a/python/test/test_order_by.py +++ b/python/test/test_order_by.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.order_by import OrderBy # noqa: E501 +from geoengine_openapi_client.models.order_by import OrderBy class TestOrderBy(unittest.TestCase): """OrderBy unit test stubs""" diff --git a/python/test/test_palette_colorizer.py b/python/test/test_palette_colorizer.py index 88790f13..82d79002 100644 --- a/python/test/test_palette_colorizer.py +++ b/python/test/test_palette_colorizer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.palette_colorizer import PaletteColorizer # noqa: E501 +from geoengine_openapi_client.models.palette_colorizer import PaletteColorizer class TestPaletteColorizer(unittest.TestCase): """PaletteColorizer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PaletteColorizer: """Test PaletteColorizer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PaletteColorizer` """ - model = PaletteColorizer() # noqa: E501 + model = PaletteColorizer() if include_optional: return PaletteColorizer( colors = { diff --git a/python/test/test_permission.py b/python/test/test_permission.py index c43def16..d940e796 100644 --- a/python/test/test_permission.py +++ b/python/test/test_permission.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.permission import Permission # noqa: E501 +from geoengine_openapi_client.models.permission import Permission class TestPermission(unittest.TestCase): """Permission unit test stubs""" diff --git a/python/test/test_permission_list_options.py b/python/test/test_permission_list_options.py index 2ab29119..72cf54b6 100644 --- a/python/test/test_permission_list_options.py +++ b/python/test/test_permission_list_options.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.permission_list_options import PermissionListOptions # noqa: E501 +from geoengine_openapi_client.models.permission_list_options import PermissionListOptions class TestPermissionListOptions(unittest.TestCase): """PermissionListOptions unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PermissionListOptions: """Test PermissionListOptions - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PermissionListOptions` """ - model = PermissionListOptions() # noqa: E501 + model = PermissionListOptions() if include_optional: return 
PermissionListOptions( limit = 0, diff --git a/python/test/test_permission_listing.py b/python/test/test_permission_listing.py index 2ee16fd0..5ff41d18 100644 --- a/python/test/test_permission_listing.py +++ b/python/test/test_permission_listing.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.permission_listing import PermissionListing # noqa: E501 +from geoengine_openapi_client.models.permission_listing import PermissionListing class TestPermissionListing(unittest.TestCase): """PermissionListing unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PermissionListing: """Test PermissionListing - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PermissionListing` """ - model = PermissionListing() # noqa: E501 + model = PermissionListing() if include_optional: return PermissionListing( permission = 'Read', diff --git a/python/test/test_permission_request.py b/python/test/test_permission_request.py index d3eeaa35..951c299d 100644 --- a/python/test/test_permission_request.py +++ b/python/test/test_permission_request.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.permission_request import PermissionRequest # noqa: E501 +from geoengine_openapi_client.models.permission_request import PermissionRequest class TestPermissionRequest(unittest.TestCase): """PermissionRequest unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PermissionRequest: """Test PermissionRequest - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PermissionRequest` """ - model = PermissionRequest() # noqa: E501 + model = PermissionRequest() if include_optional: return PermissionRequest( permission = 'Read', diff --git a/python/test/test_permissions_api.py b/python/test/test_permissions_api.py index 3beaf07e..db811c97 100644 --- a/python/test/test_permissions_api.py +++ b/python/test/test_permissions_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.permissions_api import PermissionsApi # noqa: E501 +from geoengine_openapi_client.api.permissions_api import PermissionsApi class TestPermissionsApi(unittest.TestCase): """PermissionsApi unit test stubs""" def setUp(self) -> None: - self.api = PermissionsApi() # noqa: E501 + self.api = PermissionsApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_add_permission_handler(self) -> None: """Test case for add_permission_handler - Adds a new permission. # noqa: E501 + Adds a new permission. """ pass def test_get_resource_permissions_handler(self) -> None: """Test case for get_resource_permissions_handler - Lists permission for a given resource. # noqa: E501 + Lists permission for a given resource. """ pass def test_remove_permission_handler(self) -> None: """Test case for remove_permission_handler - Removes an existing permission. # noqa: E501 + Removes an existing permission. 
""" pass diff --git a/python/test/test_plot.py b/python/test/test_plot.py index 49270606..9ced7aa1 100644 --- a/python/test/test_plot.py +++ b/python/test/test_plot.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.plot import Plot # noqa: E501 +from geoengine_openapi_client.models.plot import Plot class TestPlot(unittest.TestCase): """Plot unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Plot: """Test Plot - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Plot` """ - model = Plot() # noqa: E501 + model = Plot() if include_optional: return Plot( name = '', diff --git a/python/test/test_plot_output_format.py b/python/test/test_plot_output_format.py index a7ddc5ec..6fd579ef 100644 --- a/python/test/test_plot_output_format.py +++ b/python/test/test_plot_output_format.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.plot_output_format import PlotOutputFormat # noqa: E501 +from geoengine_openapi_client.models.plot_output_format import PlotOutputFormat class TestPlotOutputFormat(unittest.TestCase): """PlotOutputFormat unit test stubs""" diff --git a/python/test/test_plot_query_rectangle.py b/python/test/test_plot_query_rectangle.py deleted file mode 100644 index c984e842..00000000 --- a/python/test/test_plot_query_rectangle.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding: utf-8 - -""" - Geo Engine API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) - - The version of the OpenAPI document: 0.8.0 - Contact: dev@geoengine.de - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. 
-""" # noqa: E501 - - -import unittest -import datetime - -from geoengine_openapi_client.models.plot_query_rectangle import PlotQueryRectangle # noqa: E501 - -class TestPlotQueryRectangle(unittest.TestCase): - """PlotQueryRectangle unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional) -> PlotQueryRectangle: - """Test PlotQueryRectangle - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # uncomment below to create an instance of `PlotQueryRectangle` - """ - model = PlotQueryRectangle() # noqa: E501 - if include_optional: - return PlotQueryRectangle( - spatial_bounds = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( - lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ), - upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ), ), - spatial_resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( - x = 1.337, - y = 1.337, ), - time_interval = geoengine_openapi_client.models.time_interval.TimeInterval( - end = 56, - start = 56, ) - ) - else: - return PlotQueryRectangle( - spatial_bounds = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( - lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ), - upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( - x = 1.337, - y = 1.337, ), ), - spatial_resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution( - x = 1.337, - y = 1.337, ), - time_interval = geoengine_openapi_client.models.time_interval.TimeInterval( - end = 56, - start = 56, ), - ) - """ - - def testPlotQueryRectangle(self): - """Test PlotQueryRectangle""" - # inst_req_only = self.make_instance(include_optional=False) - # inst_req_and_optional = self.make_instance(include_optional=True) - -if __name__ == '__main__': - unittest.main() diff --git a/python/test/test_plot_result_descriptor.py b/python/test/test_plot_result_descriptor.py index 8c9d5469..4f84be94 100644 --- a/python/test/test_plot_result_descriptor.py +++ b/python/test/test_plot_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.plot_result_descriptor import PlotResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.plot_result_descriptor import PlotResultDescriptor class TestPlotResultDescriptor(unittest.TestCase): """PlotResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PlotResultDescriptor: """Test PlotResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PlotResultDescriptor` """ - model = PlotResultDescriptor() # noqa: E501 + model = PlotResultDescriptor() if include_optional: return PlotResultDescriptor( bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( diff --git a/python/test/test_plots_api.py b/python/test/test_plots_api.py index 801ae52d..988275b4 100644 --- a/python/test/test_plots_api.py +++ b/python/test/test_plots_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.plots_api import PlotsApi # 
noqa: E501 +from geoengine_openapi_client.api.plots_api import PlotsApi class TestPlotsApi(unittest.TestCase): """PlotsApi unit test stubs""" def setUp(self) -> None: - self.api = PlotsApi() # noqa: E501 + self.api = PlotsApi() def tearDown(self) -> None: pass @@ -30,7 +30,7 @@ def tearDown(self) -> None: def test_get_plot_handler(self) -> None: """Test case for get_plot_handler - Generates a plot. # noqa: E501 + Generates a plot. """ pass diff --git a/python/test/test_point_symbology.py b/python/test/test_point_symbology.py index 4c4c423a..14c13fe3 100644 --- a/python/test/test_point_symbology.py +++ b/python/test/test_point_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.point_symbology import PointSymbology # noqa: E501 +from geoengine_openapi_client.models.point_symbology import PointSymbology class TestPointSymbology(unittest.TestCase): """PointSymbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PointSymbology: """Test PointSymbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PointSymbology` """ - model = PointSymbology() # noqa: E501 + model = PointSymbology() if include_optional: return PointSymbology( fill_color = None, diff --git a/python/test/test_polygon_symbology.py b/python/test/test_polygon_symbology.py index 75d8dcdc..4c90416d 100644 --- a/python/test/test_polygon_symbology.py +++ b/python/test/test_polygon_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.polygon_symbology import PolygonSymbology # noqa: E501 +from geoengine_openapi_client.models.polygon_symbology import PolygonSymbology class TestPolygonSymbology(unittest.TestCase): """PolygonSymbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> PolygonSymbology: """Test PolygonSymbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `PolygonSymbology` """ - model = PolygonSymbology() # noqa: E501 + model = PolygonSymbology() if include_optional: return PolygonSymbology( auto_simplified = True, diff --git a/python/test/test_project.py b/python/test/test_project.py index 79946e26..7faefba2 100644 --- a/python/test/test_project.py +++ b/python/test/test_project.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project import Project # noqa: E501 +from geoengine_openapi_client.models.project import Project class TestProject(unittest.TestCase): """Project unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Project: """Test Project - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Project` """ - model = Project() # noqa: E501 + model = Project() if include_optional: return Project( bounds = geoengine_openapi_client.models.st_rectangle.STRectangle( diff --git a/python/test/test_project_layer.py b/python/test/test_project_layer.py index 
37fe9a17..12dd5d59 100644 --- a/python/test/test_project_layer.py +++ b/python/test/test_project_layer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project_layer import ProjectLayer # noqa: E501 +from geoengine_openapi_client.models.project_layer import ProjectLayer class TestProjectLayer(unittest.TestCase): """ProjectLayer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProjectLayer: """Test ProjectLayer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProjectLayer` """ - model = ProjectLayer() # noqa: E501 + model = ProjectLayer() if include_optional: return ProjectLayer( name = '', diff --git a/python/test/test_project_listing.py b/python/test/test_project_listing.py index f727143f..acc99244 100644 --- a/python/test/test_project_listing.py +++ b/python/test/test_project_listing.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project_listing import ProjectListing # noqa: E501 +from geoengine_openapi_client.models.project_listing import ProjectListing class TestProjectListing(unittest.TestCase): """ProjectListing unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProjectListing: """Test ProjectListing - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProjectListing` """ - model = ProjectListing() # noqa: E501 + model = ProjectListing() if include_optional: return ProjectListing( changed = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), diff --git a/python/test/test_project_resource.py b/python/test/test_project_resource.py index 7ba319b1..d854c337 100644 --- a/python/test/test_project_resource.py +++ b/python/test/test_project_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project_resource import ProjectResource # noqa: E501 +from geoengine_openapi_client.models.project_resource import ProjectResource class TestProjectResource(unittest.TestCase): """ProjectResource unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProjectResource: """Test ProjectResource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProjectResource` """ - model = ProjectResource() # noqa: E501 + model = ProjectResource() if include_optional: return ProjectResource( id = '', diff --git a/python/test/test_project_update_token.py b/python/test/test_project_update_token.py index 3804f8f4..b4beb713 100644 --- a/python/test/test_project_update_token.py +++ b/python/test/test_project_update_token.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project_update_token import ProjectUpdateToken # noqa: E501 +from geoengine_openapi_client.models.project_update_token import ProjectUpdateToken class TestProjectUpdateToken(unittest.TestCase): """ProjectUpdateToken unit test stubs""" diff --git 
a/python/test/test_project_version.py b/python/test/test_project_version.py index 3b73d581..716b702f 100644 --- a/python/test/test_project_version.py +++ b/python/test/test_project_version.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.project_version import ProjectVersion # noqa: E501 +from geoengine_openapi_client.models.project_version import ProjectVersion class TestProjectVersion(unittest.TestCase): """ProjectVersion unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProjectVersion: """Test ProjectVersion - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProjectVersion` """ - model = ProjectVersion() # noqa: E501 + model = ProjectVersion() if include_optional: return ProjectVersion( changed = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), diff --git a/python/test/test_projects_api.py b/python/test/test_projects_api.py index edd9f7dd..9372c2e4 100644 --- a/python/test/test_projects_api.py +++ b/python/test/test_projects_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.projects_api import ProjectsApi # noqa: E501 +from geoengine_openapi_client.api.projects_api import ProjectsApi class TestProjectsApi(unittest.TestCase): """ProjectsApi unit test stubs""" def setUp(self) -> None: - self.api = ProjectsApi() # noqa: E501 + self.api = ProjectsApi() def tearDown(self) -> None: pass @@ -30,49 +30,49 @@ def tearDown(self) -> None: def test_create_project_handler(self) -> None: """Test case for create_project_handler - Create a new project for the user. # noqa: E501 + Create a new project for the user. """ pass def test_delete_project_handler(self) -> None: """Test case for delete_project_handler - Deletes a project. # noqa: E501 + Deletes a project. """ pass def test_list_projects_handler(self) -> None: """Test case for list_projects_handler - List all projects accessible to the user that match the selected criteria. # noqa: E501 + List all projects accessible to the user that match the selected criteria. """ pass def test_load_project_latest_handler(self) -> None: """Test case for load_project_latest_handler - Retrieves details about the latest version of a project. # noqa: E501 + Retrieves details about the latest version of a project. """ pass def test_load_project_version_handler(self) -> None: """Test case for load_project_version_handler - Retrieves details about the given version of a project. # noqa: E501 + Retrieves details about the given version of a project. """ pass def test_project_versions_handler(self) -> None: """Test case for project_versions_handler - Lists all available versions of a project. # noqa: E501 + Lists all available versions of a project. """ pass def test_update_project_handler(self) -> None: """Test case for update_project_handler - Updates a project. # noqa: E501 + Updates a project. This will create a new version. 
""" pass diff --git a/python/test/test_provenance.py b/python/test/test_provenance.py index 5e8d3404..17ae6b29 100644 --- a/python/test/test_provenance.py +++ b/python/test/test_provenance.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provenance import Provenance # noqa: E501 +from geoengine_openapi_client.models.provenance import Provenance class TestProvenance(unittest.TestCase): """Provenance unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Provenance: """Test Provenance - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Provenance` """ - model = Provenance() # noqa: E501 + model = Provenance() if include_optional: return Provenance( citation = '', diff --git a/python/test/test_provenance_entry.py b/python/test/test_provenance_entry.py index f29a33fa..c70cd847 100644 --- a/python/test/test_provenance_entry.py +++ b/python/test/test_provenance_entry.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provenance_entry import ProvenanceEntry # noqa: E501 +from geoengine_openapi_client.models.provenance_entry import ProvenanceEntry class TestProvenanceEntry(unittest.TestCase): """ProvenanceEntry unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProvenanceEntry: """Test ProvenanceEntry - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProvenanceEntry` """ - model = ProvenanceEntry() # noqa: E501 + model = ProvenanceEntry() if include_optional: return ProvenanceEntry( data = [ diff --git a/python/test/test_provenance_output.py b/python/test/test_provenance_output.py index 8f86ac71..01ec5062 100644 --- a/python/test/test_provenance_output.py +++ b/python/test/test_provenance_output.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provenance_output import ProvenanceOutput # noqa: E501 +from geoengine_openapi_client.models.provenance_output import ProvenanceOutput class TestProvenanceOutput(unittest.TestCase): """ProvenanceOutput unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProvenanceOutput: """Test ProvenanceOutput - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProvenanceOutput` """ - model = ProvenanceOutput() # noqa: E501 + model = ProvenanceOutput() if include_optional: return ProvenanceOutput( data = None, diff --git a/python/test/test_provenances.py b/python/test/test_provenances.py index 2a94547a..2e81964d 100644 --- a/python/test/test_provenances.py +++ b/python/test/test_provenances.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provenances import Provenances # noqa: E501 +from geoengine_openapi_client.models.provenances import Provenances class TestProvenances(unittest.TestCase): """Provenances unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, 
include_optional) -> Provenances: """Test Provenances - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Provenances` """ - model = Provenances() # noqa: E501 + model = Provenances() if include_optional: return Provenances( provenances = [ diff --git a/python/test/test_provider_capabilities.py b/python/test/test_provider_capabilities.py index a12cce2a..98671351 100644 --- a/python/test/test_provider_capabilities.py +++ b/python/test/test_provider_capabilities.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provider_capabilities import ProviderCapabilities # noqa: E501 +from geoengine_openapi_client.models.provider_capabilities import ProviderCapabilities class TestProviderCapabilities(unittest.TestCase): """ProviderCapabilities unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProviderCapabilities: """Test ProviderCapabilities - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProviderCapabilities` """ - model = ProviderCapabilities() # noqa: E501 + model = ProviderCapabilities() if include_optional: return ProviderCapabilities( listing = True, diff --git a/python/test/test_provider_layer_collection_id.py b/python/test/test_provider_layer_collection_id.py index 1c30df71..33478e31 100644 --- a/python/test/test_provider_layer_collection_id.py +++ b/python/test/test_provider_layer_collection_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId # noqa: E501 +from geoengine_openapi_client.models.provider_layer_collection_id import ProviderLayerCollectionId class TestProviderLayerCollectionId(unittest.TestCase): """ProviderLayerCollectionId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProviderLayerCollectionId: """Test ProviderLayerCollectionId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ProviderLayerCollectionId` """ - model = ProviderLayerCollectionId() # noqa: E501 + model = ProviderLayerCollectionId() if include_optional: return ProviderLayerCollectionId( collection_id = '', diff --git a/python/test/test_provider_layer_id.py b/python/test/test_provider_layer_id.py index d7164b6e..b756d835 100644 --- a/python/test/test_provider_layer_id.py +++ b/python/test/test_provider_layer_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId # noqa: E501 +from geoengine_openapi_client.models.provider_layer_id import ProviderLayerId class TestProviderLayerId(unittest.TestCase): """ProviderLayerId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ProviderLayerId: """Test ProviderLayerId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and 
optional params are included """ # uncomment below to create an instance of `ProviderLayerId` """ - model = ProviderLayerId() # noqa: E501 + model = ProviderLayerId() if include_optional: return ProviderLayerId( layer_id = '', diff --git a/python/test/test_raster_query_rectangle.py b/python/test/test_query_rectangle.py similarity index 78% rename from python/test/test_raster_query_rectangle.py rename to python/test/test_query_rectangle.py index b5d46b77..14ed3843 100644 --- a/python/test/test_raster_query_rectangle.py +++ b/python/test/test_query_rectangle.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_query_rectangle import RasterQueryRectangle # noqa: E501 +from geoengine_openapi_client.models.query_rectangle import QueryRectangle -class TestRasterQueryRectangle(unittest.TestCase): - """RasterQueryRectangle unit test stubs""" +class TestQueryRectangle(unittest.TestCase): + """QueryRectangle unit test stubs""" def setUp(self): pass @@ -27,16 +26,16 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> RasterQueryRectangle: - """Test RasterQueryRectangle - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> QueryRectangle: + """Test QueryRectangle + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `RasterQueryRectangle` + # uncomment below to create an instance of `QueryRectangle` """ - model = RasterQueryRectangle() # noqa: E501 + model = QueryRectangle() if include_optional: - return RasterQueryRectangle( + return QueryRectangle( spatial_bounds = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( x = 1.337, @@ -52,7 +51,7 @@ def make_instance(self, include_optional) -> RasterQueryRectangle: start = 56, ) ) else: - return RasterQueryRectangle( + return QueryRectangle( spatial_bounds = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( x = 1.337, @@ -69,8 +68,8 @@ def make_instance(self, include_optional) -> RasterQueryRectangle: ) """ - def testRasterQueryRectangle(self): - """Test RasterQueryRectangle""" + def testQueryRectangle(self): + """Test QueryRectangle""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_quota.py b/python/test/test_quota.py index e4a75128..b44f401d 100644 --- a/python/test/test_quota.py +++ b/python/test/test_quota.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.quota import Quota # noqa: E501 +from geoengine_openapi_client.models.quota import Quota class TestQuota(unittest.TestCase): """Quota unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Quota: """Test Quota - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Quota` """ - model = Quota() # noqa: E501 + model = Quota() if include_optional: return Quota( available = 56, diff --git a/python/test/test_raster_band_descriptor.py 
b/python/test/test_raster_band_descriptor.py index 1808fdb1..c8298f35 100644 --- a/python/test/test_raster_band_descriptor.py +++ b/python/test/test_raster_band_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor # noqa: E501 +from geoengine_openapi_client.models.raster_band_descriptor import RasterBandDescriptor class TestRasterBandDescriptor(unittest.TestCase): """RasterBandDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterBandDescriptor: """Test RasterBandDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterBandDescriptor` """ - model = RasterBandDescriptor() # noqa: E501 + model = RasterBandDescriptor() if include_optional: return RasterBandDescriptor( measurement = None, diff --git a/python/test/test_raster_colorizer.py b/python/test/test_raster_colorizer.py index 7af0490b..fc671037 100644 --- a/python/test/test_raster_colorizer.py +++ b/python/test/test_raster_colorizer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_colorizer import RasterColorizer # noqa: E501 +from geoengine_openapi_client.models.raster_colorizer import RasterColorizer class TestRasterColorizer(unittest.TestCase): """RasterColorizer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterColorizer: """Test RasterColorizer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterColorizer` """ - model = RasterColorizer() # noqa: E501 + model = RasterColorizer() if include_optional: return RasterColorizer( band = 0, diff --git a/python/test/test_raster_data_type.py b/python/test/test_raster_data_type.py index 4a409522..4b8642ed 100644 --- a/python/test/test_raster_data_type.py +++ b/python/test/test_raster_data_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_data_type import RasterDataType # noqa: E501 +from geoengine_openapi_client.models.raster_data_type import RasterDataType class TestRasterDataType(unittest.TestCase): """RasterDataType unit test stubs""" diff --git a/python/test/test_raster_dataset_from_workflow.py b/python/test/test_raster_dataset_from_workflow.py index a7030b56..e4d5b002 100644 --- a/python/test/test_raster_dataset_from_workflow.py +++ b/python/test/test_raster_dataset_from_workflow.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_dataset_from_workflow import RasterDatasetFromWorkflow # noqa: E501 +from geoengine_openapi_client.models.raster_dataset_from_workflow import RasterDatasetFromWorkflow class TestRasterDatasetFromWorkflow(unittest.TestCase): """RasterDatasetFromWorkflow unit test stubs""" @@ -29,19 +28,19 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterDatasetFromWorkflow: """Test RasterDatasetFromWorkflow - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included 
""" # uncomment below to create an instance of `RasterDatasetFromWorkflow` """ - model = RasterDatasetFromWorkflow() # noqa: E501 + model = RasterDatasetFromWorkflow() if include_optional: return RasterDatasetFromWorkflow( as_cog = True, description = '', display_name = '', name = '', - query = geoengine_openapi_client.models.raster_query_rectangle.RasterQueryRectangle( + query = geoengine_openapi_client.models.query_rectangle.QueryRectangle( spatial_bounds = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( x = 1.337, @@ -59,7 +58,7 @@ def make_instance(self, include_optional) -> RasterDatasetFromWorkflow: else: return RasterDatasetFromWorkflow( display_name = '', - query = geoengine_openapi_client.models.raster_query_rectangle.RasterQueryRectangle( + query = geoengine_openapi_client.models.query_rectangle.QueryRectangle( spatial_bounds = geoengine_openapi_client.models.spatial_partition2_d.SpatialPartition2D( lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( x = 1.337, diff --git a/python/test/test_raster_dataset_from_workflow_result.py b/python/test/test_raster_dataset_from_workflow_result.py index 807218ae..a9536305 100644 --- a/python/test/test_raster_dataset_from_workflow_result.py +++ b/python/test/test_raster_dataset_from_workflow_result.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_dataset_from_workflow_result import RasterDatasetFromWorkflowResult # noqa: E501 +from geoengine_openapi_client.models.raster_dataset_from_workflow_result import RasterDatasetFromWorkflowResult class TestRasterDatasetFromWorkflowResult(unittest.TestCase): """RasterDatasetFromWorkflowResult unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterDatasetFromWorkflowResult: """Test RasterDatasetFromWorkflowResult - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterDatasetFromWorkflowResult` """ - model = RasterDatasetFromWorkflowResult() # noqa: E501 + model = RasterDatasetFromWorkflowResult() if include_optional: return RasterDatasetFromWorkflowResult( dataset = '', diff --git a/python/test/test_raster_properties_entry_type.py b/python/test/test_raster_properties_entry_type.py index cc37e8ff..b6334751 100644 --- a/python/test/test_raster_properties_entry_type.py +++ b/python/test/test_raster_properties_entry_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_properties_entry_type import RasterPropertiesEntryType # noqa: E501 +from geoengine_openapi_client.models.raster_properties_entry_type import RasterPropertiesEntryType class TestRasterPropertiesEntryType(unittest.TestCase): """RasterPropertiesEntryType unit test stubs""" diff --git a/python/test/test_raster_properties_key.py b/python/test/test_raster_properties_key.py index 7088d238..0e816086 100644 --- a/python/test/test_raster_properties_key.py +++ b/python/test/test_raster_properties_key.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey # noqa: E501 +from geoengine_openapi_client.models.raster_properties_key import RasterPropertiesKey class 
TestRasterPropertiesKey(unittest.TestCase): """RasterPropertiesKey unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterPropertiesKey: """Test RasterPropertiesKey - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterPropertiesKey` """ - model = RasterPropertiesKey() # noqa: E501 + model = RasterPropertiesKey() if include_optional: return RasterPropertiesKey( domain = '', diff --git a/python/test/test_raster_result_descriptor.py b/python/test/test_raster_result_descriptor.py index ace5ee51..8ce8ad2e 100644 --- a/python/test/test_raster_result_descriptor.py +++ b/python/test/test_raster_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.raster_result_descriptor import RasterResultDescriptor class TestRasterResultDescriptor(unittest.TestCase): """RasterResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterResultDescriptor: """Test RasterResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterResultDescriptor` """ - model = RasterResultDescriptor() # noqa: E501 + model = RasterResultDescriptor() if include_optional: return RasterResultDescriptor( bands = [ diff --git a/python/test/test_raster_stream_websocket_result_type.py b/python/test/test_raster_stream_websocket_result_type.py index 92c93284..75c66f77 100644 --- a/python/test/test_raster_stream_websocket_result_type.py +++ b/python/test/test_raster_stream_websocket_result_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType # noqa: E501 +from geoengine_openapi_client.models.raster_stream_websocket_result_type import RasterStreamWebsocketResultType class TestRasterStreamWebsocketResultType(unittest.TestCase): """RasterStreamWebsocketResultType unit test stubs""" diff --git a/python/test/test_raster_symbology.py b/python/test/test_raster_symbology.py index 4e71494f..ce821972 100644 --- a/python/test/test_raster_symbology.py +++ b/python/test/test_raster_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.raster_symbology import RasterSymbology # noqa: E501 +from geoengine_openapi_client.models.raster_symbology import RasterSymbology class TestRasterSymbology(unittest.TestCase): """RasterSymbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RasterSymbology: """Test RasterSymbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RasterSymbology` """ - model = RasterSymbology() # noqa: E501 + model = RasterSymbology() if include_optional: return RasterSymbology( opacity = 1.337, diff --git a/python/test/test_resource.py b/python/test/test_resource.py 
index d1e154fa..894fcc67 100644 --- a/python/test/test_resource.py +++ b/python/test/test_resource.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource import Resource # noqa: E501 +from geoengine_openapi_client.models.resource import Resource class TestResource(unittest.TestCase): """Resource unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Resource: """Test Resource - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Resource` """ - model = Resource() # noqa: E501 + model = Resource() if include_optional: return Resource( id = '', diff --git a/python/test/test_resource_id.py b/python/test/test_resource_id.py index 8a75a079..c7bff3e9 100644 --- a/python/test/test_resource_id.py +++ b/python/test/test_resource_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id import ResourceId # noqa: E501 +from geoengine_openapi_client.models.resource_id import ResourceId class TestResourceId(unittest.TestCase): """ResourceId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceId: """Test ResourceId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceId` """ - model = ResourceId() # noqa: E501 + model = ResourceId() if include_optional: return ResourceId( id = '', diff --git a/python/test/test_resource_id_dataset_id.py b/python/test/test_resource_id_dataset_id.py index ca158c6d..4f864c27 100644 --- a/python/test/test_resource_id_dataset_id.py +++ b/python/test/test_resource_id_dataset_id.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id_dataset_id import ResourceIdDatasetId # noqa: E501 +from geoengine_openapi_client.models.resource_id_dataset_id import ResourceIdDatasetId class TestResourceIdDatasetId(unittest.TestCase): """ResourceIdDatasetId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceIdDatasetId: """Test ResourceIdDatasetId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceIdDatasetId` """ - model = ResourceIdDatasetId() # noqa: E501 + model = ResourceIdDatasetId() if include_optional: return ResourceIdDatasetId( id = '', diff --git a/python/test/test_resource_id_layer.py b/python/test/test_resource_id_layer.py index e8e99cdc..4806f8e6 100644 --- a/python/test/test_resource_id_layer.py +++ b/python/test/test_resource_id_layer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id_layer import ResourceIdLayer # noqa: E501 +from geoengine_openapi_client.models.resource_id_layer import ResourceIdLayer class TestResourceIdLayer(unittest.TestCase): """ResourceIdLayer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceIdLayer: """Test ResourceIdLayer - include_option is a boolean, when False 
only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceIdLayer` """ - model = ResourceIdLayer() # noqa: E501 + model = ResourceIdLayer() if include_optional: return ResourceIdLayer( id = '', diff --git a/python/test/test_resource_id_layer_collection.py b/python/test/test_resource_id_layer_collection.py index 9ece6b13..93857fbb 100644 --- a/python/test/test_resource_id_layer_collection.py +++ b/python/test/test_resource_id_layer_collection.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id_layer_collection import ResourceIdLayerCollection # noqa: E501 +from geoengine_openapi_client.models.resource_id_layer_collection import ResourceIdLayerCollection class TestResourceIdLayerCollection(unittest.TestCase): """ResourceIdLayerCollection unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceIdLayerCollection: """Test ResourceIdLayerCollection - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceIdLayerCollection` """ - model = ResourceIdLayerCollection() # noqa: E501 + model = ResourceIdLayerCollection() if include_optional: return ResourceIdLayerCollection( id = '', diff --git a/python/test/test_resource_id_ml_model.py b/python/test/test_resource_id_ml_model.py index d9b36b52..21027e05 100644 --- a/python/test/test_resource_id_ml_model.py +++ b/python/test/test_resource_id_ml_model.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id_ml_model import ResourceIdMlModel # noqa: E501 +from geoengine_openapi_client.models.resource_id_ml_model import ResourceIdMlModel class TestResourceIdMlModel(unittest.TestCase): """ResourceIdMlModel unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceIdMlModel: """Test ResourceIdMlModel - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceIdMlModel` """ - model = ResourceIdMlModel() # noqa: E501 + model = ResourceIdMlModel() if include_optional: return ResourceIdMlModel( id = '', diff --git a/python/test/test_resource_id_project.py b/python/test/test_resource_id_project.py index 102ae43d..d096b54b 100644 --- a/python/test/test_resource_id_project.py +++ b/python/test/test_resource_id_project.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.resource_id_project import ResourceIdProject # noqa: E501 +from geoengine_openapi_client.models.resource_id_project import ResourceIdProject class TestResourceIdProject(unittest.TestCase): """ResourceIdProject unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ResourceIdProject: """Test ResourceIdProject - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ResourceIdProject` """ - model = 
ResourceIdProject() # noqa: E501 + model = ResourceIdProject() if include_optional: return ResourceIdProject( id = '', diff --git a/python/test/test_role.py b/python/test/test_role.py index 8e8a6959..34e13116 100644 --- a/python/test/test_role.py +++ b/python/test/test_role.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.role import Role # noqa: E501 +from geoengine_openapi_client.models.role import Role class TestRole(unittest.TestCase): """Role unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Role: """Test Role - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Role` """ - model = Role() # noqa: E501 + model = Role() if include_optional: return Role( id = '', diff --git a/python/test/test_role_description.py b/python/test/test_role_description.py index db3ea38e..143659c4 100644 --- a/python/test/test_role_description.py +++ b/python/test/test_role_description.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.role_description import RoleDescription # noqa: E501 +from geoengine_openapi_client.models.role_description import RoleDescription class TestRoleDescription(unittest.TestCase): """RoleDescription unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> RoleDescription: """Test RoleDescription - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `RoleDescription` """ - model = RoleDescription() # noqa: E501 + model = RoleDescription() if include_optional: return RoleDescription( individual = True, diff --git a/python/test/test_search_capabilities.py b/python/test/test_search_capabilities.py index 34059791..024c9732 100644 --- a/python/test/test_search_capabilities.py +++ b/python/test/test_search_capabilities.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.search_capabilities import SearchCapabilities # noqa: E501 +from geoengine_openapi_client.models.search_capabilities import SearchCapabilities class TestSearchCapabilities(unittest.TestCase): """SearchCapabilities unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SearchCapabilities: """Test SearchCapabilities - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SearchCapabilities` """ - model = SearchCapabilities() # noqa: E501 + model = SearchCapabilities() if include_optional: return SearchCapabilities( autocomplete = True, diff --git a/python/test/test_search_type.py b/python/test/test_search_type.py index 3d8008c5..b0f33dbe 100644 --- a/python/test/test_search_type.py +++ b/python/test/test_search_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.search_type import SearchType # noqa: E501 +from geoengine_openapi_client.models.search_type import SearchType class TestSearchType(unittest.TestCase): """SearchType unit test stubs""" diff --git 
a/python/test/test_search_types.py b/python/test/test_search_types.py index 4cf1dc0e..943da8d1 100644 --- a/python/test/test_search_types.py +++ b/python/test/test_search_types.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.search_types import SearchTypes # noqa: E501 +from geoengine_openapi_client.models.search_types import SearchTypes class TestSearchTypes(unittest.TestCase): """SearchTypes unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SearchTypes: """Test SearchTypes - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SearchTypes` """ - model = SearchTypes() # noqa: E501 + model = SearchTypes() if include_optional: return SearchTypes( fulltext = True, diff --git a/python/test/test_server_info.py b/python/test/test_server_info.py index 6d94e0ca..899a5e5f 100644 --- a/python/test/test_server_info.py +++ b/python/test/test_server_info.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.server_info import ServerInfo # noqa: E501 +from geoengine_openapi_client.models.server_info import ServerInfo class TestServerInfo(unittest.TestCase): """ServerInfo unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> ServerInfo: """Test ServerInfo - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `ServerInfo` """ - model = ServerInfo() # noqa: E501 + model = ServerInfo() if include_optional: return ServerInfo( build_date = '', diff --git a/python/test/test_session_api.py b/python/test/test_session_api.py index b4cdebc9..e140b269 100644 --- a/python/test/test_session_api.py +++ b/python/test/test_session_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.session_api import SessionApi # noqa: E501 +from geoengine_openapi_client.api.session_api import SessionApi class TestSessionApi(unittest.TestCase): """SessionApi unit test stubs""" def setUp(self) -> None: - self.api = SessionApi() # noqa: E501 + self.api = SessionApi() def tearDown(self) -> None: pass @@ -30,49 +30,49 @@ def tearDown(self) -> None: def test_anonymous_handler(self) -> None: """Test case for anonymous_handler - Creates session for anonymous user. The session's id serves as a Bearer token for requests. # noqa: E501 + Creates session for anonymous user. The session's id serves as a Bearer token for requests. """ pass def test_login_handler(self) -> None: """Test case for login_handler - Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. # noqa: E501 + Creates a session by providing user credentials. The session's id serves as a Bearer token for requests. """ pass def test_logout_handler(self) -> None: """Test case for logout_handler - Ends a session. # noqa: E501 + Ends a session. """ pass def test_oidc_init(self) -> None: """Test case for oidc_init - Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. # noqa: E501 + Initializes the Open Id Connect login procedure by requesting a parametrized url to the configured Id Provider. 
""" pass def test_oidc_login(self) -> None: """Test case for oidc_login - Creates a session for a user via a login with Open Id Connect. # noqa: E501 + Creates a session for a user via a login with Open Id Connect. This call must be preceded by a call to oidcInit and match the parameters of that call. """ pass def test_register_user_handler(self) -> None: """Test case for register_user_handler - Registers a user. # noqa: E501 + Registers a user. """ pass def test_session_handler(self) -> None: """Test case for session_handler - Retrieves details about the current session. # noqa: E501 + Retrieves details about the current session. """ pass diff --git a/python/test/test_single_band_raster_colorizer.py b/python/test/test_single_band_raster_colorizer.py index 23fd8b57..8a4cb660 100644 --- a/python/test/test_single_band_raster_colorizer.py +++ b/python/test/test_single_band_raster_colorizer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.single_band_raster_colorizer import SingleBandRasterColorizer # noqa: E501 +from geoengine_openapi_client.models.single_band_raster_colorizer import SingleBandRasterColorizer class TestSingleBandRasterColorizer(unittest.TestCase): """SingleBandRasterColorizer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SingleBandRasterColorizer: """Test SingleBandRasterColorizer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SingleBandRasterColorizer` """ - model = SingleBandRasterColorizer() # noqa: E501 + model = SingleBandRasterColorizer() if include_optional: return SingleBandRasterColorizer( band = 0, diff --git a/python/test/test_spatial_partition2_d.py b/python/test/test_spatial_partition2_d.py index 2b5d894c..51061d34 100644 --- a/python/test/test_spatial_partition2_d.py +++ b/python/test/test_spatial_partition2_d.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.spatial_partition2_d import SpatialPartition2D # noqa: E501 +from geoengine_openapi_client.models.spatial_partition2_d import SpatialPartition2D class TestSpatialPartition2D(unittest.TestCase): """SpatialPartition2D unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SpatialPartition2D: """Test SpatialPartition2D - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SpatialPartition2D` """ - model = SpatialPartition2D() # noqa: E501 + model = SpatialPartition2D() if include_optional: return SpatialPartition2D( lower_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( diff --git a/python/test/test_spatial_reference_authority.py b/python/test/test_spatial_reference_authority.py index b95daa96..5776d893 100644 --- a/python/test/test_spatial_reference_authority.py +++ b/python/test/test_spatial_reference_authority.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.spatial_reference_authority import SpatialReferenceAuthority # noqa: E501 +from geoengine_openapi_client.models.spatial_reference_authority import SpatialReferenceAuthority class 
TestSpatialReferenceAuthority(unittest.TestCase): """SpatialReferenceAuthority unit test stubs""" diff --git a/python/test/test_spatial_reference_specification.py b/python/test/test_spatial_reference_specification.py index b1dd24e8..62dc8888 100644 --- a/python/test/test_spatial_reference_specification.py +++ b/python/test/test_spatial_reference_specification.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.spatial_reference_specification import SpatialReferenceSpecification # noqa: E501 +from geoengine_openapi_client.models.spatial_reference_specification import SpatialReferenceSpecification class TestSpatialReferenceSpecification(unittest.TestCase): """SpatialReferenceSpecification unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SpatialReferenceSpecification: """Test SpatialReferenceSpecification - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SpatialReferenceSpecification` """ - model = SpatialReferenceSpecification() # noqa: E501 + model = SpatialReferenceSpecification() if include_optional: return SpatialReferenceSpecification( axis_labels = [ diff --git a/python/test/test_spatial_references_api.py b/python/test/test_spatial_references_api.py index a958d0ec..a118fff6 100644 --- a/python/test/test_spatial_references_api.py +++ b/python/test/test_spatial_references_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.spatial_references_api import SpatialReferencesApi # noqa: E501 +from geoengine_openapi_client.api.spatial_references_api import SpatialReferencesApi class TestSpatialReferencesApi(unittest.TestCase): """SpatialReferencesApi unit test stubs""" def setUp(self) -> None: - self.api = SpatialReferencesApi() # noqa: E501 + self.api = SpatialReferencesApi() def tearDown(self) -> None: pass diff --git a/python/test/test_spatial_resolution.py b/python/test/test_spatial_resolution.py index a410eec7..e6a7c7ef 100644 --- a/python/test/test_spatial_resolution.py +++ b/python/test/test_spatial_resolution.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.spatial_resolution import SpatialResolution # noqa: E501 +from geoengine_openapi_client.models.spatial_resolution import SpatialResolution class TestSpatialResolution(unittest.TestCase): """SpatialResolution unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SpatialResolution: """Test SpatialResolution - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SpatialResolution` """ - model = SpatialResolution() # noqa: E501 + model = SpatialResolution() if include_optional: return SpatialResolution( x = 1.337, diff --git a/python/test/test_st_rectangle.py b/python/test/test_st_rectangle.py index 27f21ce3..abd124bd 100644 --- a/python/test/test_st_rectangle.py +++ b/python/test/test_st_rectangle.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.st_rectangle import STRectangle # noqa: E501 +from geoengine_openapi_client.models.st_rectangle import STRectangle class TestSTRectangle(unittest.TestCase): """STRectangle 
unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> STRectangle: """Test STRectangle - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `STRectangle` """ - model = STRectangle() # noqa: E501 + model = STRectangle() if include_optional: return STRectangle( bounding_box = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( diff --git a/python/test/test_static_meta_data.py b/python/test/test_static_meta_data.py new file mode 100644 index 00000000..ad3632d1 --- /dev/null +++ b/python/test/test_static_meta_data.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" + Geo Engine API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: 0.8.0 + Contact: dev@geoengine.de + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from geoengine_openapi_client.models.static_meta_data import StaticMetaData + +class TestStaticMetaData(unittest.TestCase): + """StaticMetaData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StaticMetaData: + """Test StaticMetaData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StaticMetaData` + """ + model = StaticMetaData() + if include_optional: + return StaticMetaData( + loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( + attribute_query = '', + cache_ttl = 0, + columns = geoengine_openapi_client.models.ogr_source_column_spec.OgrSourceColumnSpec( + bool = [ + '' + ], + datetime = [ + '' + ], + float = [ + '' + ], + format_specifics = null, + int = [ + '' + ], + rename = { + 'key' : '' + }, + text = [ + '' + ], + x = '', + y = '', ), + data_type = 'Data', + default_geometry = null, + file_name = '', + force_ogr_spatial_filter = True, + force_ogr_time_filter = True, + layer_name = '', + on_error = 'ignore', + sql_query = '', + time = null, ), + result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( + bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( + lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), + columns = { + 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( + data_type = 'category', + measurement = null, ) + }, + data_type = 'Data', + spatial_reference = '', + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), + type = 'OgrMetaData' + ) + else: + return StaticMetaData( + loading_info = geoengine_openapi_client.models.ogr_source_dataset.OgrSourceDataset( + attribute_query = '', + cache_ttl = 0, + columns = geoengine_openapi_client.models.ogr_source_column_spec.OgrSourceColumnSpec( + bool = [ + '' + ], + datetime = [ + '' + ], + float = [ + '' + ], + format_specifics = null, + int = [ + '' + ], + rename = { + 'key' : '' + }, + text = [ + '' + ], + x = '', + y = '', ), + data_type 
= 'Data', + default_geometry = null, + file_name = '', + force_ogr_spatial_filter = True, + force_ogr_time_filter = True, + layer_name = '', + on_error = 'ignore', + sql_query = '', + time = null, ), + result_descriptor = geoengine_openapi_client.models.vector_result_descriptor.VectorResultDescriptor( + bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( + lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), + upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D( + x = 1.337, + y = 1.337, ), ), + columns = { + 'key' : geoengine_openapi_client.models.vector_column_info.VectorColumnInfo( + data_type = 'category', + measurement = null, ) + }, + data_type = 'Data', + spatial_reference = '', + time = geoengine_openapi_client.models.time_interval.TimeInterval( + end = 56, + start = 56, ), ), + type = 'OgrMetaData', + ) + """ + + def testStaticMetaData(self): + """Test StaticMetaData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/python/test/test_static_number_param.py b/python/test/test_static_number_param.py index e90b10ec..4e2201c7 100644 --- a/python/test/test_static_number_param.py +++ b/python/test/test_static_number_param.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.static_number_param import StaticNumberParam # noqa: E501 +from geoengine_openapi_client.models.static_number_param import StaticNumberParam class TestStaticNumberParam(unittest.TestCase): """StaticNumberParam unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> StaticNumberParam: """Test StaticNumberParam - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `StaticNumberParam` """ - model = StaticNumberParam() # noqa: E501 + model = StaticNumberParam() if include_optional: return StaticNumberParam( type = 'static', diff --git a/python/test/test_stroke_param.py b/python/test/test_stroke_param.py index 99cec95c..5bd39328 100644 --- a/python/test/test_stroke_param.py +++ b/python/test/test_stroke_param.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.stroke_param import StrokeParam # noqa: E501 +from geoengine_openapi_client.models.stroke_param import StrokeParam class TestStrokeParam(unittest.TestCase): """StrokeParam unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> StrokeParam: """Test StrokeParam - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `StrokeParam` """ - model = StrokeParam() # noqa: E501 + model = StrokeParam() if include_optional: return StrokeParam( color = None, diff --git a/python/test/test_suggest_meta_data.py b/python/test/test_suggest_meta_data.py index 3407e716..2b97b38c 100644 --- a/python/test/test_suggest_meta_data.py +++ b/python/test/test_suggest_meta_data.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.suggest_meta_data import SuggestMetaData # 
noqa: E501 +from geoengine_openapi_client.models.suggest_meta_data import SuggestMetaData class TestSuggestMetaData(unittest.TestCase): """SuggestMetaData unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> SuggestMetaData: """Test SuggestMetaData - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `SuggestMetaData` """ - model = SuggestMetaData() # noqa: E501 + model = SuggestMetaData() if include_optional: return SuggestMetaData( data_path = None, diff --git a/python/test/test_symbology.py b/python/test/test_symbology.py index ab723a76..31249fb7 100644 --- a/python/test/test_symbology.py +++ b/python/test/test_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.symbology import Symbology # noqa: E501 +from geoengine_openapi_client.models.symbology import Symbology class TestSymbology(unittest.TestCase): """Symbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> Symbology: """Test Symbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `Symbology` """ - model = Symbology() # noqa: E501 + model = Symbology() if include_optional: return Symbology( opacity = 1.337, diff --git a/python/test/test_task_abort_options.py b/python/test/test_task_abort_options.py index 8ea2879f..6a43c00c 100644 --- a/python/test/test_task_abort_options.py +++ b/python/test/test_task_abort_options.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_abort_options import TaskAbortOptions # noqa: E501 +from geoengine_openapi_client.models.task_abort_options import TaskAbortOptions class TestTaskAbortOptions(unittest.TestCase): """TaskAbortOptions unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskAbortOptions: """Test TaskAbortOptions - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskAbortOptions` """ - model = TaskAbortOptions() # noqa: E501 + model = TaskAbortOptions() if include_optional: return TaskAbortOptions( force = True diff --git a/python/test/test_task_filter.py b/python/test/test_task_filter.py index 40b81b59..194640d9 100644 --- a/python/test/test_task_filter.py +++ b/python/test/test_task_filter.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_filter import TaskFilter # noqa: E501 +from geoengine_openapi_client.models.task_filter import TaskFilter class TestTaskFilter(unittest.TestCase): """TaskFilter unit test stubs""" diff --git a/python/test/test_task_list_options.py b/python/test/test_task_list_options.py index ae2efa97..93df6e77 100644 --- a/python/test/test_task_list_options.py +++ b/python/test/test_task_list_options.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_list_options import TaskListOptions # noqa: E501 +from geoengine_openapi_client.models.task_list_options import 
TaskListOptions class TestTaskListOptions(unittest.TestCase): """TaskListOptions unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskListOptions: """Test TaskListOptions - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskListOptions` """ - model = TaskListOptions() # noqa: E501 + model = TaskListOptions() if include_optional: return TaskListOptions( filter = 'running', diff --git a/python/test/test_task_response.py b/python/test/test_task_response.py index c7a3fa26..a1d5b4c3 100644 --- a/python/test/test_task_response.py +++ b/python/test/test_task_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_response import TaskResponse # noqa: E501 +from geoengine_openapi_client.models.task_response import TaskResponse class TestTaskResponse(unittest.TestCase): """TaskResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskResponse: """Test TaskResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskResponse` """ - model = TaskResponse() # noqa: E501 + model = TaskResponse() if include_optional: return TaskResponse( task_id = '' diff --git a/python/test/test_task_status.py b/python/test/test_task_status.py index 2a845e9c..bbda84ae 100644 --- a/python/test/test_task_status.py +++ b/python/test/test_task_status.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_status import TaskStatus # noqa: E501 +from geoengine_openapi_client.models.task_status import TaskStatus class TestTaskStatus(unittest.TestCase): """TaskStatus unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatus: """Test TaskStatus - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatus` """ - model = TaskStatus() # noqa: E501 + model = TaskStatus() if include_optional: return TaskStatus( description = '', diff --git a/python/test/test_task_status_aborted.py b/python/test/test_task_status_aborted.py index 0cf42902..3340b36a 100644 --- a/python/test/test_task_status_aborted.py +++ b/python/test/test_task_status_aborted.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_status_aborted import TaskStatusAborted # noqa: E501 +from geoengine_openapi_client.models.task_status_aborted import TaskStatusAborted class TestTaskStatusAborted(unittest.TestCase): """TaskStatusAborted unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatusAborted: """Test TaskStatusAborted - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatusAborted` """ - model = TaskStatusAborted() # noqa: E501 + model = TaskStatusAborted() if 
include_optional: return TaskStatusAborted( clean_up = None, diff --git a/python/test/test_task_status_completed.py b/python/test/test_task_status_completed.py index f9d41606..641cd787 100644 --- a/python/test/test_task_status_completed.py +++ b/python/test/test_task_status_completed.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_status_completed import TaskStatusCompleted # noqa: E501 +from geoengine_openapi_client.models.task_status_completed import TaskStatusCompleted class TestTaskStatusCompleted(unittest.TestCase): """TaskStatusCompleted unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatusCompleted: """Test TaskStatusCompleted - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatusCompleted` """ - model = TaskStatusCompleted() # noqa: E501 + model = TaskStatusCompleted() if include_optional: return TaskStatusCompleted( description = '', diff --git a/python/test/test_task_status_failed.py b/python/test/test_task_status_failed.py index 40d3131d..1ce8d04f 100644 --- a/python/test/test_task_status_failed.py +++ b/python/test/test_task_status_failed.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_status_failed import TaskStatusFailed # noqa: E501 +from geoengine_openapi_client.models.task_status_failed import TaskStatusFailed class TestTaskStatusFailed(unittest.TestCase): """TaskStatusFailed unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatusFailed: """Test TaskStatusFailed - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatusFailed` """ - model = TaskStatusFailed() # noqa: E501 + model = TaskStatusFailed() if include_optional: return TaskStatusFailed( clean_up = None, diff --git a/python/test/test_task_status_running.py b/python/test/test_task_status_running.py index 4f26fdc8..00739830 100644 --- a/python/test/test_task_status_running.py +++ b/python/test/test_task_status_running.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.task_status_running import TaskStatusRunning # noqa: E501 +from geoengine_openapi_client.models.task_status_running import TaskStatusRunning class TestTaskStatusRunning(unittest.TestCase): """TaskStatusRunning unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatusRunning: """Test TaskStatusRunning - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatusRunning` """ - model = TaskStatusRunning() # noqa: E501 + model = TaskStatusRunning() if include_optional: return TaskStatusRunning( description = '', diff --git a/python/test/test_task_status_with_id.py b/python/test/test_task_status_with_id.py index 4114e961..dc075dd4 100644 --- a/python/test/test_task_status_with_id.py +++ b/python/test/test_task_status_with_id.py @@ -14,9 +14,8 @@ import unittest -import datetime 
-from geoengine_openapi_client.models.task_status_with_id import TaskStatusWithId # noqa: E501 +from geoengine_openapi_client.models.task_status_with_id import TaskStatusWithId class TestTaskStatusWithId(unittest.TestCase): """TaskStatusWithId unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TaskStatusWithId: """Test TaskStatusWithId - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TaskStatusWithId` """ - model = TaskStatusWithId() # noqa: E501 + model = TaskStatusWithId() if include_optional: return TaskStatusWithId( task_id = '' diff --git a/python/test/test_tasks_api.py b/python/test/test_tasks_api.py index 0d732241..c99b474e 100644 --- a/python/test/test_tasks_api.py +++ b/python/test/test_tasks_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.tasks_api import TasksApi # noqa: E501 +from geoengine_openapi_client.api.tasks_api import TasksApi class TestTasksApi(unittest.TestCase): """TasksApi unit test stubs""" def setUp(self) -> None: - self.api = TasksApi() # noqa: E501 + self.api = TasksApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_abort_handler(self) -> None: """Test case for abort_handler - Abort a running task. # noqa: E501 + Abort a running task. """ pass def test_list_handler(self) -> None: """Test case for list_handler - Retrieve the status of all tasks. # noqa: E501 + Retrieve the status of all tasks. """ pass def test_status_handler(self) -> None: """Test case for status_handler - Retrieve the status of a task. # noqa: E501 + Retrieve the status of a task. 
""" pass diff --git a/python/test/test_text_symbology.py b/python/test/test_text_symbology.py index 8f90ff76..c2606320 100644 --- a/python/test/test_text_symbology.py +++ b/python/test/test_text_symbology.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.text_symbology import TextSymbology # noqa: E501 +from geoengine_openapi_client.models.text_symbology import TextSymbology class TestTextSymbology(unittest.TestCase): """TextSymbology unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TextSymbology: """Test TextSymbology - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TextSymbology` """ - model = TextSymbology() # noqa: E501 + model = TextSymbology() if include_optional: return TextSymbology( attribute = '', diff --git a/python/test/test_time_granularity.py b/python/test/test_time_granularity.py index 9428c7ae..ffe13cae 100644 --- a/python/test/test_time_granularity.py +++ b/python/test/test_time_granularity.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.time_granularity import TimeGranularity # noqa: E501 +from geoengine_openapi_client.models.time_granularity import TimeGranularity class TestTimeGranularity(unittest.TestCase): """TimeGranularity unit test stubs""" diff --git a/python/test/test_time_interval.py b/python/test/test_time_interval.py index 90b54ad3..0654bfce 100644 --- a/python/test/test_time_interval.py +++ b/python/test/test_time_interval.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.time_interval import TimeInterval # noqa: E501 +from geoengine_openapi_client.models.time_interval import TimeInterval class TestTimeInterval(unittest.TestCase): """TimeInterval unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TimeInterval: """Test TimeInterval - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TimeInterval` """ - model = TimeInterval() # noqa: E501 + model = TimeInterval() if include_optional: return TimeInterval( end = 56, diff --git a/python/test/test_time_reference.py b/python/test/test_time_reference.py index 015adede..547eefd2 100644 --- a/python/test/test_time_reference.py +++ b/python/test/test_time_reference.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.time_reference import TimeReference # noqa: E501 +from geoengine_openapi_client.models.time_reference import TimeReference class TestTimeReference(unittest.TestCase): """TimeReference unit test stubs""" diff --git a/python/test/test_time_step.py b/python/test/test_time_step.py index 4eeb13a4..1c311640 100644 --- a/python/test/test_time_step.py +++ b/python/test/test_time_step.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.time_step import TimeStep # noqa: E501 +from geoengine_openapi_client.models.time_step import TimeStep class TestTimeStep(unittest.TestCase): """TimeStep unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TimeStep: """Test TimeStep - include_option is 
a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TimeStep` """ - model = TimeStep() # noqa: E501 + model = TimeStep() if include_optional: return TimeStep( granularity = 'millis', diff --git a/python/test/test_typed_geometry.py b/python/test/test_typed_geometry.py index 7f7a4e23..512ef82a 100644 --- a/python/test/test_typed_geometry.py +++ b/python/test/test_typed_geometry.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_geometry import TypedGeometry # noqa: E501 +from geoengine_openapi_client.models.typed_geometry import TypedGeometry class TestTypedGeometry(unittest.TestCase): """TypedGeometry unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedGeometry: """Test TypedGeometry - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedGeometry` """ - model = TypedGeometry() # noqa: E501 + model = TypedGeometry() if include_optional: return TypedGeometry( data = None, diff --git a/python/test/test_typed_geometry_one_of.py b/python/test/test_typed_geometry_one_of.py index 11d99add..a5122a2c 100644 --- a/python/test/test_typed_geometry_one_of.py +++ b/python/test/test_typed_geometry_one_of.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_geometry_one_of import TypedGeometryOneOf # noqa: E501 +from geoengine_openapi_client.models.typed_geometry_one_of import TypedGeometryOneOf class TestTypedGeometryOneOf(unittest.TestCase): """TypedGeometryOneOf unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedGeometryOneOf: """Test TypedGeometryOneOf - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedGeometryOneOf` """ - model = TypedGeometryOneOf() # noqa: E501 + model = TypedGeometryOneOf() if include_optional: return TypedGeometryOneOf( data = None diff --git a/python/test/test_typed_geometry_one_of1.py b/python/test/test_typed_geometry_one_of1.py index 1d41e8fa..51b07391 100644 --- a/python/test/test_typed_geometry_one_of1.py +++ b/python/test/test_typed_geometry_one_of1.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_geometry_one_of1 import TypedGeometryOneOf1 # noqa: E501 +from geoengine_openapi_client.models.typed_geometry_one_of1 import TypedGeometryOneOf1 class TestTypedGeometryOneOf1(unittest.TestCase): """TypedGeometryOneOf1 unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedGeometryOneOf1: """Test TypedGeometryOneOf1 - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedGeometryOneOf1` """ - model = TypedGeometryOneOf1() # noqa: E501 + model = TypedGeometryOneOf1() if include_optional: return TypedGeometryOneOf1( multi_point = 
geoengine_openapi_client.models.multi_point.MultiPoint( diff --git a/python/test/test_typed_geometry_one_of2.py b/python/test/test_typed_geometry_one_of2.py index 9b367c04..c1c6b8cf 100644 --- a/python/test/test_typed_geometry_one_of2.py +++ b/python/test/test_typed_geometry_one_of2.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_geometry_one_of2 import TypedGeometryOneOf2 # noqa: E501 +from geoengine_openapi_client.models.typed_geometry_one_of2 import TypedGeometryOneOf2 class TestTypedGeometryOneOf2(unittest.TestCase): """TypedGeometryOneOf2 unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedGeometryOneOf2: """Test TypedGeometryOneOf2 - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedGeometryOneOf2` """ - model = TypedGeometryOneOf2() # noqa: E501 + model = TypedGeometryOneOf2() if include_optional: return TypedGeometryOneOf2( multi_line_string = geoengine_openapi_client.models.multi_line_string.MultiLineString( diff --git a/python/test/test_typed_geometry_one_of3.py b/python/test/test_typed_geometry_one_of3.py index 53949b8b..80797b07 100644 --- a/python/test/test_typed_geometry_one_of3.py +++ b/python/test/test_typed_geometry_one_of3.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_geometry_one_of3 import TypedGeometryOneOf3 # noqa: E501 +from geoengine_openapi_client.models.typed_geometry_one_of3 import TypedGeometryOneOf3 class TestTypedGeometryOneOf3(unittest.TestCase): """TypedGeometryOneOf3 unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedGeometryOneOf3: """Test TypedGeometryOneOf3 - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedGeometryOneOf3` """ - model = TypedGeometryOneOf3() # noqa: E501 + model = TypedGeometryOneOf3() if include_optional: return TypedGeometryOneOf3( multi_polygon = geoengine_openapi_client.models.multi_polygon.MultiPolygon( diff --git a/python/test/test_typed_operator.py b/python/test/test_typed_operator.py index 8292b549..b50075f2 100644 --- a/python/test/test_typed_operator.py +++ b/python/test/test_typed_operator.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_operator import TypedOperator # noqa: E501 +from geoengine_openapi_client.models.typed_operator import TypedOperator class TestTypedOperator(unittest.TestCase): """TypedOperator unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedOperator: """Test TypedOperator - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedOperator` """ - model = TypedOperator() # noqa: E501 + model = TypedOperator() if include_optional: return TypedOperator( operator = geoengine_openapi_client.models.typed_operator_operator.TypedOperator_operator( diff --git a/python/test/test_typed_operator_operator.py 
b/python/test/test_typed_operator_operator.py index 2083eb93..ff8fe13c 100644 --- a/python/test/test_typed_operator_operator.py +++ b/python/test/test_typed_operator_operator.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_operator_operator import TypedOperatorOperator # noqa: E501 +from geoengine_openapi_client.models.typed_operator_operator import TypedOperatorOperator class TestTypedOperatorOperator(unittest.TestCase): """TypedOperatorOperator unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedOperatorOperator: """Test TypedOperatorOperator - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedOperatorOperator` """ - model = TypedOperatorOperator() # noqa: E501 + model = TypedOperatorOperator() if include_optional: return TypedOperatorOperator( params = geoengine_openapi_client.models.params.params(), diff --git a/python/test/test_typed_plot_result_descriptor.py b/python/test/test_typed_plot_result_descriptor.py index 8344d178..7a2c8948 100644 --- a/python/test/test_typed_plot_result_descriptor.py +++ b/python/test/test_typed_plot_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_plot_result_descriptor import TypedPlotResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.typed_plot_result_descriptor import TypedPlotResultDescriptor class TestTypedPlotResultDescriptor(unittest.TestCase): """TypedPlotResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedPlotResultDescriptor: """Test TypedPlotResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedPlotResultDescriptor` """ - model = TypedPlotResultDescriptor() # noqa: E501 + model = TypedPlotResultDescriptor() if include_optional: return TypedPlotResultDescriptor( bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( diff --git a/python/test/test_typed_raster_result_descriptor.py b/python/test/test_typed_raster_result_descriptor.py index 259066f5..906e03b7 100644 --- a/python/test/test_typed_raster_result_descriptor.py +++ b/python/test/test_typed_raster_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_raster_result_descriptor import TypedRasterResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.typed_raster_result_descriptor import TypedRasterResultDescriptor class TestTypedRasterResultDescriptor(unittest.TestCase): """TypedRasterResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedRasterResultDescriptor: """Test TypedRasterResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedRasterResultDescriptor` """ - model = TypedRasterResultDescriptor() # noqa: E501 + model = TypedRasterResultDescriptor() if include_optional: 
return TypedRasterResultDescriptor( bands = [ diff --git a/python/test/test_typed_result_descriptor.py b/python/test/test_typed_result_descriptor.py index b04a9eb1..fe0f85a1 100644 --- a/python/test/test_typed_result_descriptor.py +++ b/python/test/test_typed_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_result_descriptor import TypedResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.typed_result_descriptor import TypedResultDescriptor class TestTypedResultDescriptor(unittest.TestCase): """TypedResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedResultDescriptor: """Test TypedResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedResultDescriptor` """ - model = TypedResultDescriptor() # noqa: E501 + model = TypedResultDescriptor() if include_optional: return TypedResultDescriptor( bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( diff --git a/python/test/test_typed_vector_result_descriptor.py b/python/test/test_typed_vector_result_descriptor.py index 7afe41cc..e5b9a81b 100644 --- a/python/test/test_typed_vector_result_descriptor.py +++ b/python/test/test_typed_vector_result_descriptor.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.typed_vector_result_descriptor import TypedVectorResultDescriptor # noqa: E501 +from geoengine_openapi_client.models.typed_vector_result_descriptor import TypedVectorResultDescriptor class TestTypedVectorResultDescriptor(unittest.TestCase): """TypedVectorResultDescriptor unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> TypedVectorResultDescriptor: """Test TypedVectorResultDescriptor - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `TypedVectorResultDescriptor` """ - model = TypedVectorResultDescriptor() # noqa: E501 + model = TypedVectorResultDescriptor() if include_optional: return TypedVectorResultDescriptor( bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D( diff --git a/python/test/test_unitless_measurement.py b/python/test/test_unitless_measurement.py index 7bdb8e7b..323e39cd 100644 --- a/python/test/test_unitless_measurement.py +++ b/python/test/test_unitless_measurement.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.unitless_measurement import UnitlessMeasurement # noqa: E501 +from geoengine_openapi_client.models.unitless_measurement import UnitlessMeasurement class TestUnitlessMeasurement(unittest.TestCase): """UnitlessMeasurement unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UnitlessMeasurement: """Test UnitlessMeasurement - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UnitlessMeasurement` """ - model = UnitlessMeasurement() # noqa: E501 + model = UnitlessMeasurement() if 
include_optional: return UnitlessMeasurement( type = 'unitless' diff --git a/python/test/test_unix_time_stamp_type.py b/python/test/test_unix_time_stamp_type.py index 7a6d991e..370cb64e 100644 --- a/python/test/test_unix_time_stamp_type.py +++ b/python/test/test_unix_time_stamp_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.unix_time_stamp_type import UnixTimeStampType # noqa: E501 +from geoengine_openapi_client.models.unix_time_stamp_type import UnixTimeStampType class TestUnixTimeStampType(unittest.TestCase): """UnixTimeStampType unit test stubs""" diff --git a/python/test/test_update_dataset.py b/python/test/test_update_dataset.py index 9192c7fb..a5ede464 100644 --- a/python/test/test_update_dataset.py +++ b/python/test/test_update_dataset.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.update_dataset import UpdateDataset # noqa: E501 +from geoengine_openapi_client.models.update_dataset import UpdateDataset class TestUpdateDataset(unittest.TestCase): """UpdateDataset unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UpdateDataset: """Test UpdateDataset - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UpdateDataset` """ - model = UpdateDataset() # noqa: E501 + model = UpdateDataset() if include_optional: return UpdateDataset( description = '', diff --git a/python/test/test_update_layer.py b/python/test/test_update_layer.py index dca8f20c..a201b89b 100644 --- a/python/test/test_update_layer.py +++ b/python/test/test_update_layer.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.update_layer import UpdateLayer # noqa: E501 +from geoengine_openapi_client.models.update_layer import UpdateLayer class TestUpdateLayer(unittest.TestCase): """UpdateLayer unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UpdateLayer: """Test UpdateLayer - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UpdateLayer` """ - model = UpdateLayer() # noqa: E501 + model = UpdateLayer() if include_optional: return UpdateLayer( description = 'Example layer description', diff --git a/python/test/test_update_layer_collection.py b/python/test/test_update_layer_collection.py index ef07cd14..6a195e2a 100644 --- a/python/test/test_update_layer_collection.py +++ b/python/test/test_update_layer_collection.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.update_layer_collection import UpdateLayerCollection # noqa: E501 +from geoengine_openapi_client.models.update_layer_collection import UpdateLayerCollection class TestUpdateLayerCollection(unittest.TestCase): """UpdateLayerCollection unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UpdateLayerCollection: """Test UpdateLayerCollection - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of 
`UpdateLayerCollection` """ - model = UpdateLayerCollection() # noqa: E501 + model = UpdateLayerCollection() if include_optional: return UpdateLayerCollection( description = 'A description for an example collection', diff --git a/python/test/test_update_project.py b/python/test/test_update_project.py index 6453dbc9..095948e8 100644 --- a/python/test/test_update_project.py +++ b/python/test/test_update_project.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.update_project import UpdateProject # noqa: E501 +from geoengine_openapi_client.models.update_project import UpdateProject class TestUpdateProject(unittest.TestCase): """UpdateProject unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UpdateProject: """Test UpdateProject - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UpdateProject` """ - model = UpdateProject() # noqa: E501 + model = UpdateProject() if include_optional: return UpdateProject( bounds = geoengine_openapi_client.models.st_rectangle.STRectangle( diff --git a/python/test/test_update_quota.py b/python/test/test_update_quota.py index f793034d..2746eac8 100644 --- a/python/test/test_update_quota.py +++ b/python/test/test_update_quota.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.update_quota import UpdateQuota # noqa: E501 +from geoengine_openapi_client.models.update_quota import UpdateQuota class TestUpdateQuota(unittest.TestCase): """UpdateQuota unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UpdateQuota: """Test UpdateQuota - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UpdateQuota` """ - model = UpdateQuota() # noqa: E501 + model = UpdateQuota() if include_optional: return UpdateQuota( available = 56 diff --git a/python/test/test_upload_file_layers_response.py b/python/test/test_upload_file_layers_response.py index 6ca2b139..b8c08759 100644 --- a/python/test/test_upload_file_layers_response.py +++ b/python/test/test_upload_file_layers_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.upload_file_layers_response import UploadFileLayersResponse # noqa: E501 +from geoengine_openapi_client.models.upload_file_layers_response import UploadFileLayersResponse class TestUploadFileLayersResponse(unittest.TestCase): """UploadFileLayersResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UploadFileLayersResponse: """Test UploadFileLayersResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UploadFileLayersResponse` """ - model = UploadFileLayersResponse() # noqa: E501 + model = UploadFileLayersResponse() if include_optional: return UploadFileLayersResponse( layers = [ diff --git a/python/test/test_upload_files_response.py b/python/test/test_upload_files_response.py index 4e7218b5..385a4a79 100644 --- 
a/python/test/test_upload_files_response.py +++ b/python/test/test_upload_files_response.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.upload_files_response import UploadFilesResponse # noqa: E501 +from geoengine_openapi_client.models.upload_files_response import UploadFilesResponse class TestUploadFilesResponse(unittest.TestCase): """UploadFilesResponse unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UploadFilesResponse: """Test UploadFilesResponse - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UploadFilesResponse` """ - model = UploadFilesResponse() # noqa: E501 + model = UploadFilesResponse() if include_optional: return UploadFilesResponse( files = [ diff --git a/python/test/test_uploads_api.py b/python/test/test_uploads_api.py index 19615ffb..e28b2df6 100644 --- a/python/test/test_uploads_api.py +++ b/python/test/test_uploads_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.uploads_api import UploadsApi # noqa: E501 +from geoengine_openapi_client.api.uploads_api import UploadsApi class TestUploadsApi(unittest.TestCase): """UploadsApi unit test stubs""" def setUp(self) -> None: - self.api = UploadsApi() # noqa: E501 + self.api = UploadsApi() def tearDown(self) -> None: pass @@ -30,21 +30,21 @@ def tearDown(self) -> None: def test_list_upload_file_layers_handler(self) -> None: """Test case for list_upload_file_layers_handler - List the layers of on uploaded file. # noqa: E501 + List the layers of on uploaded file. """ pass def test_list_upload_files_handler(self) -> None: """Test case for list_upload_files_handler - List the files of on upload. # noqa: E501 + List the files of on upload. """ pass def test_upload_handler(self) -> None: """Test case for upload_handler - Uploads files. # noqa: E501 + Uploads files. """ pass diff --git a/python/test/test_usage_summary_granularity.py b/python/test/test_usage_summary_granularity.py index 3ff79209..885691ef 100644 --- a/python/test/test_usage_summary_granularity.py +++ b/python/test/test_usage_summary_granularity.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.usage_summary_granularity import UsageSummaryGranularity # noqa: E501 +from geoengine_openapi_client.models.usage_summary_granularity import UsageSummaryGranularity class TestUsageSummaryGranularity(unittest.TestCase): """UsageSummaryGranularity unit test stubs""" diff --git a/python/test/test_user_api.py b/python/test/test_user_api.py index b6463b36..89f4c05e 100644 --- a/python/test/test_user_api.py +++ b/python/test/test_user_api.py @@ -15,14 +15,14 @@ import unittest -from geoengine_openapi_client.api.user_api import UserApi # noqa: E501 +from geoengine_openapi_client.api.user_api import UserApi class TestUserApi(unittest.TestCase): """UserApi unit test stubs""" def setUp(self) -> None: - self.api = UserApi() # noqa: E501 + self.api = UserApi() def tearDown(self) -> None: pass @@ -30,91 +30,91 @@ def tearDown(self) -> None: def test_add_role_handler(self) -> None: """Test case for add_role_handler - Add a new role. Requires admin privilige. # noqa: E501 + Add a new role. Requires admin privilige. """ pass def test_assign_role_handler(self) -> None: """Test case for assign_role_handler - Assign a role to a user. 
Requires admin privilige. # noqa: E501 + Assign a role to a user. Requires admin privilige. """ pass def test_computation_quota_handler(self) -> None: """Test case for computation_quota_handler - Retrieves the quota used by computation with the given computation id # noqa: E501 + Retrieves the quota used by computation with the given computation id """ pass def test_computations_quota_handler(self) -> None: """Test case for computations_quota_handler - Retrieves the quota used by computations # noqa: E501 + Retrieves the quota used by computations """ pass def test_data_usage_handler(self) -> None: """Test case for data_usage_handler - Retrieves the data usage # noqa: E501 + Retrieves the data usage """ pass def test_data_usage_summary_handler(self) -> None: """Test case for data_usage_summary_handler - Retrieves the data usage summary # noqa: E501 + Retrieves the data usage summary """ pass def test_get_role_by_name_handler(self) -> None: """Test case for get_role_by_name_handler - Get role by name # noqa: E501 + Get role by name """ pass def test_get_role_descriptions(self) -> None: """Test case for get_role_descriptions - Query roles for the current user. # noqa: E501 + Query roles for the current user. """ pass def test_get_user_quota_handler(self) -> None: """Test case for get_user_quota_handler - Retrieves the available and used quota of a specific user. # noqa: E501 + Retrieves the available and used quota of a specific user. """ pass def test_quota_handler(self) -> None: """Test case for quota_handler - Retrieves the available and used quota of the current user. # noqa: E501 + Retrieves the available and used quota of the current user. """ pass def test_remove_role_handler(self) -> None: """Test case for remove_role_handler - Remove a role. Requires admin privilige. # noqa: E501 + Remove a role. Requires admin privilige. """ pass def test_revoke_role_handler(self) -> None: """Test case for revoke_role_handler - Revoke a role from a user. Requires admin privilige. # noqa: E501 + Revoke a role from a user. Requires admin privilige. """ pass def test_update_user_quota_handler(self) -> None: """Test case for update_user_quota_handler - Update the available quota of a specific user. # noqa: E501 + Update the available quota of a specific user. 
""" pass diff --git a/python/test/test_user_credentials.py b/python/test/test_user_credentials.py index 870e7d7d..7b3c0dc4 100644 --- a/python/test/test_user_credentials.py +++ b/python/test/test_user_credentials.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.user_credentials import UserCredentials # noqa: E501 +from geoengine_openapi_client.models.user_credentials import UserCredentials class TestUserCredentials(unittest.TestCase): """UserCredentials unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UserCredentials: """Test UserCredentials - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UserCredentials` """ - model = UserCredentials() # noqa: E501 + model = UserCredentials() if include_optional: return UserCredentials( email = '', diff --git a/python/test/test_user_info.py b/python/test/test_user_info.py index 55fc4d74..89800932 100644 --- a/python/test/test_user_info.py +++ b/python/test/test_user_info.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.user_info import UserInfo # noqa: E501 +from geoengine_openapi_client.models.user_info import UserInfo class TestUserInfo(unittest.TestCase): """UserInfo unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UserInfo: """Test UserInfo - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UserInfo` """ - model = UserInfo() # noqa: E501 + model = UserInfo() if include_optional: return UserInfo( email = '', diff --git a/python/test/test_user_registration.py b/python/test/test_user_registration.py index 14248155..640869ba 100644 --- a/python/test/test_user_registration.py +++ b/python/test/test_user_registration.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.user_registration import UserRegistration # noqa: E501 +from geoengine_openapi_client.models.user_registration import UserRegistration class TestUserRegistration(unittest.TestCase): """UserRegistration unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UserRegistration: """Test UserRegistration - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UserRegistration` """ - model = UserRegistration() # noqa: E501 + model = UserRegistration() if include_optional: return UserRegistration( email = '', diff --git a/python/test/test_user_session.py b/python/test/test_user_session.py index d17c4dc6..1f2ec33b 100644 --- a/python/test/test_user_session.py +++ b/python/test/test_user_session.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.user_session import UserSession # noqa: E501 +from geoengine_openapi_client.models.user_session import UserSession class TestUserSession(unittest.TestCase): """UserSession unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> UserSession: 
"""Test UserSession - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `UserSession` """ - model = UserSession() # noqa: E501 + model = UserSession() if include_optional: return UserSession( created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), diff --git a/python/test/test_plot_update.py b/python/test/test_vec_update.py similarity index 63% rename from python/test/test_plot_update.py rename to python/test/test_vec_update.py index 28cb70dd..6d5b1c1c 100644 --- a/python/test/test_plot_update.py +++ b/python/test/test_vec_update.py @@ -14,12 +14,11 @@ import unittest -import datetime -from geoengine_openapi_client.models.plot_update import PlotUpdate # noqa: E501 +from geoengine_openapi_client.models.vec_update import VecUpdate -class TestPlotUpdate(unittest.TestCase): - """PlotUpdate unit test stubs""" +class TestVecUpdate(unittest.TestCase): + """VecUpdate unit test stubs""" def setUp(self): pass @@ -27,28 +26,28 @@ def setUp(self): def tearDown(self): pass - def make_instance(self, include_optional) -> PlotUpdate: - """Test PlotUpdate - include_option is a boolean, when False only required + def make_instance(self, include_optional) -> VecUpdate: + """Test VecUpdate + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ - # uncomment below to create an instance of `PlotUpdate` + # uncomment below to create an instance of `VecUpdate` """ - model = PlotUpdate() # noqa: E501 + model = VecUpdate() if include_optional: - return PlotUpdate( + return VecUpdate( name = '', workflow = '' ) else: - return PlotUpdate( + return VecUpdate( name = '', workflow = '', ) """ - def testPlotUpdate(self): - """Test PlotUpdate""" + def testVecUpdate(self): + """Test VecUpdate""" # inst_req_only = self.make_instance(include_optional=False) # inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/python/test/test_vector_column_info.py b/python/test/test_vector_column_info.py index a24a3ae7..1868cf7d 100644 --- a/python/test/test_vector_column_info.py +++ b/python/test/test_vector_column_info.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo # noqa: E501 +from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo class TestVectorColumnInfo(unittest.TestCase): """VectorColumnInfo unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> VectorColumnInfo: """Test VectorColumnInfo - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `VectorColumnInfo` """ - model = VectorColumnInfo() # noqa: E501 + model = VectorColumnInfo() if include_optional: return VectorColumnInfo( data_type = 'category', diff --git a/python/test/test_vector_data_type.py b/python/test/test_vector_data_type.py index 5858c17f..85b30254 100644 --- a/python/test/test_vector_data_type.py +++ b/python/test/test_vector_data_type.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.vector_data_type import VectorDataType # noqa: E501 +from 
diff --git a/python/test/test_vector_column_info.py b/python/test/test_vector_column_info.py
index a24a3ae7..1868cf7d 100644
--- a/python/test/test_vector_column_info.py
+++ b/python/test/test_vector_column_info.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo # noqa: E501
+from geoengine_openapi_client.models.vector_column_info import VectorColumnInfo

 class TestVectorColumnInfo(unittest.TestCase):
     """VectorColumnInfo unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> VectorColumnInfo:
         """Test VectorColumnInfo
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `VectorColumnInfo`
         """
-        model = VectorColumnInfo() # noqa: E501
+        model = VectorColumnInfo()
         if include_optional:
             return VectorColumnInfo(
                 data_type = 'category',
diff --git a/python/test/test_vector_data_type.py b/python/test/test_vector_data_type.py
index 5858c17f..85b30254 100644
--- a/python/test/test_vector_data_type.py
+++ b/python/test/test_vector_data_type.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.vector_data_type import VectorDataType # noqa: E501
+from geoengine_openapi_client.models.vector_data_type import VectorDataType

 class TestVectorDataType(unittest.TestCase):
     """VectorDataType unit test stubs"""
diff --git a/python/test/test_vector_query_rectangle.py b/python/test/test_vector_query_rectangle.py
deleted file mode 100644
index 49a2e3a9..00000000
--- a/python/test/test_vector_query_rectangle.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# coding: utf-8
-
-"""
-    Geo Engine API
-
-    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
-
-    The version of the OpenAPI document: 0.8.0
-    Contact: dev@geoengine.de
-    Generated by OpenAPI Generator (https://openapi-generator.tech)
-
-    Do not edit the class manually.
-""" # noqa: E501
-
-
-import unittest
-import datetime
-
-from geoengine_openapi_client.models.vector_query_rectangle import VectorQueryRectangle # noqa: E501
-
-class TestVectorQueryRectangle(unittest.TestCase):
-    """VectorQueryRectangle unit test stubs"""
-
-    def setUp(self):
-        pass
-
-    def tearDown(self):
-        pass
-
-    def make_instance(self, include_optional) -> VectorQueryRectangle:
-        """Test VectorQueryRectangle
-            include_option is a boolean, when False only required
-            params are included, when True both required and
-            optional params are included """
-        # uncomment below to create an instance of `VectorQueryRectangle`
-        """
-        model = VectorQueryRectangle() # noqa: E501
-        if include_optional:
-            return VectorQueryRectangle(
-                spatial_bounds = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D(
-                    lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D(
-                        x = 1.337,
-                        y = 1.337, ),
-                    upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D(
-                        x = 1.337,
-                        y = 1.337, ), ),
-                spatial_resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution(
-                    x = 1.337,
-                    y = 1.337, ),
-                time_interval = geoengine_openapi_client.models.time_interval.TimeInterval(
-                    end = 56,
-                    start = 56, )
-            )
-        else:
-            return VectorQueryRectangle(
-                spatial_bounds = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D(
-                    lower_left_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D(
-                        x = 1.337,
-                        y = 1.337, ),
-                    upper_right_coordinate = geoengine_openapi_client.models.coordinate2_d.Coordinate2D(
-                        x = 1.337,
-                        y = 1.337, ), ),
-                spatial_resolution = geoengine_openapi_client.models.spatial_resolution.SpatialResolution(
-                    x = 1.337,
-                    y = 1.337, ),
-                time_interval = geoengine_openapi_client.models.time_interval.TimeInterval(
-                    end = 56,
-                    start = 56, ),
-            )
-        """
-
-    def testVectorQueryRectangle(self):
-        """Test VectorQueryRectangle"""
-        # inst_req_only = self.make_instance(include_optional=False)
-        # inst_req_and_optional = self.make_instance(include_optional=True)
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/python/test/test_vector_result_descriptor.py b/python/test/test_vector_result_descriptor.py
index b850e4d9..3b2ee847 100644
--- a/python/test/test_vector_result_descriptor.py
+++ b/python/test/test_vector_result_descriptor.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.vector_result_descriptor import VectorResultDescriptor # noqa: E501
+from geoengine_openapi_client.models.vector_result_descriptor import VectorResultDescriptor

 class TestVectorResultDescriptor(unittest.TestCase):
     """VectorResultDescriptor unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> VectorResultDescriptor:
         """Test VectorResultDescriptor
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `VectorResultDescriptor`
         """
-        model = VectorResultDescriptor() # noqa: E501
+        model = VectorResultDescriptor()
         if include_optional:
             return VectorResultDescriptor(
                 bbox = geoengine_openapi_client.models.bounding_box2_d.BoundingBox2D(
diff --git a/python/test/test_volume.py b/python/test/test_volume.py
index a202f8f9..e86ded45 100644
--- a/python/test/test_volume.py
+++ b/python/test/test_volume.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.volume import Volume # noqa: E501
+from geoengine_openapi_client.models.volume import Volume

 class TestVolume(unittest.TestCase):
     """Volume unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> Volume:
         """Test Volume
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `Volume`
         """
-        model = Volume() # noqa: E501
+        model = Volume()
         if include_optional:
             return Volume(
                 name = '',
@@ -43,6 +42,7 @@ def make_instance(self, include_optional) -> Volume:
         else:
             return Volume(
                 name = '',
+                path = '',
         )
         """

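The required-parameters branch of the Volume stub now also sets path, which suggests the regenerated model treats both fields as mandatory. A minimal sketch under that assumption; the literal values are placeholders:

    from geoengine_openapi_client.models.volume import Volume

    # after this change, constructing a Volume without `path` is expected to fail validation
    volume = Volume(name='test_data', path='/data/test_data')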
diff --git a/python/test/test_volume_file_layers_response.py b/python/test/test_volume_file_layers_response.py
index 1519ef81..a1f8f1d1 100644
--- a/python/test/test_volume_file_layers_response.py
+++ b/python/test/test_volume_file_layers_response.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.volume_file_layers_response import VolumeFileLayersResponse # noqa: E501
+from geoengine_openapi_client.models.volume_file_layers_response import VolumeFileLayersResponse

 class TestVolumeFileLayersResponse(unittest.TestCase):
     """VolumeFileLayersResponse unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> VolumeFileLayersResponse:
         """Test VolumeFileLayersResponse
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `VolumeFileLayersResponse`
         """
-        model = VolumeFileLayersResponse() # noqa: E501
+        model = VolumeFileLayersResponse()
         if include_optional:
             return VolumeFileLayersResponse(
                 layers = [
diff --git a/python/test/test_wcs_boundingbox.py b/python/test/test_wcs_boundingbox.py
index 5d0e40e8..140c39da 100644
--- a/python/test/test_wcs_boundingbox.py
+++ b/python/test/test_wcs_boundingbox.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wcs_boundingbox import WcsBoundingbox # noqa: E501
+from geoengine_openapi_client.models.wcs_boundingbox import WcsBoundingbox

 class TestWcsBoundingbox(unittest.TestCase):
     """WcsBoundingbox unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> WcsBoundingbox:
         """Test WcsBoundingbox
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `WcsBoundingbox`
         """
-        model = WcsBoundingbox() # noqa: E501
+        model = WcsBoundingbox()
         if include_optional:
             return WcsBoundingbox(
                 bbox = [
diff --git a/python/test/test_wcs_service.py b/python/test/test_wcs_service.py
index c9328602..a674a633 100644
--- a/python/test/test_wcs_service.py
+++ b/python/test/test_wcs_service.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wcs_service import WcsService # noqa: E501
+from geoengine_openapi_client.models.wcs_service import WcsService

 class TestWcsService(unittest.TestCase):
     """WcsService unit test stubs"""
diff --git a/python/test/test_wcs_version.py b/python/test/test_wcs_version.py
index 2d5802ff..43a8e08f 100644
--- a/python/test/test_wcs_version.py
+++ b/python/test/test_wcs_version.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wcs_version import WcsVersion # noqa: E501
+from geoengine_openapi_client.models.wcs_version import WcsVersion

 class TestWcsVersion(unittest.TestCase):
     """WcsVersion unit test stubs"""
diff --git a/python/test/test_wfs_service.py b/python/test/test_wfs_service.py
index b8619dbb..94297d93 100644
--- a/python/test/test_wfs_service.py
+++ b/python/test/test_wfs_service.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wfs_service import WfsService # noqa: E501
+from geoengine_openapi_client.models.wfs_service import WfsService

 class TestWfsService(unittest.TestCase):
     """WfsService unit test stubs"""
diff --git a/python/test/test_wfs_version.py b/python/test/test_wfs_version.py
index a9df7529..377dcabd 100644
--- a/python/test/test_wfs_version.py
+++ b/python/test/test_wfs_version.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wfs_version import WfsVersion # noqa: E501
+from geoengine_openapi_client.models.wfs_version import WfsVersion

 class TestWfsVersion(unittest.TestCase):
     """WfsVersion unit test stubs"""
diff --git a/python/test/test_wms_service.py b/python/test/test_wms_service.py
index bbb5e126..9e94c03e 100644
--- a/python/test/test_wms_service.py
+++ b/python/test/test_wms_service.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wms_service import WmsService # noqa: E501
+from geoengine_openapi_client.models.wms_service import WmsService

 class TestWmsService(unittest.TestCase):
     """WmsService unit test stubs"""
diff --git a/python/test/test_wms_version.py b/python/test/test_wms_version.py
index eb3b5189..d53cd4bf 100644
--- a/python/test/test_wms_version.py
+++ b/python/test/test_wms_version.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.wms_version import WmsVersion # noqa: E501
+from geoengine_openapi_client.models.wms_version import WmsVersion

 class TestWmsVersion(unittest.TestCase):
     """WmsVersion unit test stubs"""
diff --git a/python/test/test_workflow.py b/python/test/test_workflow.py
index 50293e40..848700de 100644
--- a/python/test/test_workflow.py
+++ b/python/test/test_workflow.py
@@ -14,9 +14,8 @@


 import unittest
-import datetime

-from geoengine_openapi_client.models.workflow import Workflow # noqa: E501
+from geoengine_openapi_client.models.workflow import Workflow

 class TestWorkflow(unittest.TestCase):
     """Workflow unit test stubs"""
@@ -29,12 +28,12 @@ def tearDown(self):

     def make_instance(self, include_optional) -> Workflow:
         """Test Workflow
-            include_option is a boolean, when False only required
+            include_optional is a boolean, when False only required
             params are included, when True both required and
             optional params are included """
         # uncomment below to create an instance of `Workflow`
         """
-        model = Workflow() # noqa: E501
+        model = Workflow()
         if include_optional:
             return Workflow(
                 operator = geoengine_openapi_client.models.typed_operator_operator.TypedOperator_operator(
diff --git a/python/test/test_workflows_api.py b/python/test/test_workflows_api.py
index fbdd0236..1030c705 100644
--- a/python/test/test_workflows_api.py
+++ b/python/test/test_workflows_api.py
@@ -15,14 +15,14 @@

 import unittest

-from geoengine_openapi_client.api.workflows_api import WorkflowsApi # noqa: E501
+from geoengine_openapi_client.api.workflows_api import WorkflowsApi


 class TestWorkflowsApi(unittest.TestCase):
     """WorkflowsApi unit test stubs"""

     def setUp(self) -> None:
-        self.api = WorkflowsApi() # noqa: E501
+        self.api = WorkflowsApi()

     def tearDown(self) -> None:
         pass
@@ -30,49 +30,49 @@ def tearDown(self) -> None:

     def test_dataset_from_workflow_handler(self) -> None:
         """Test case for dataset_from_workflow_handler

-        Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. # noqa: E501
+        Create a task for creating a new dataset from the result of the workflow given by its `id` and the dataset parameters in the request body. Returns the id of the created task
         """
         pass

     def test_get_workflow_all_metadata_zip_handler(self) -> None:
         """Test case for get_workflow_all_metadata_zip_handler

-        Gets a ZIP archive of the worklow, its provenance and the output metadata. # noqa: E501
+        Gets a ZIP archive of the worklow, its provenance and the output metadata.
         """
         pass

     def test_get_workflow_metadata_handler(self) -> None:
         """Test case for get_workflow_metadata_handler

-        Gets the metadata of a workflow # noqa: E501
+        Gets the metadata of a workflow
         """
         pass

     def test_get_workflow_provenance_handler(self) -> None:
         """Test case for get_workflow_provenance_handler

-        Gets the provenance of all datasets used in a workflow. # noqa: E501
+        Gets the provenance of all datasets used in a workflow.
         """
         pass

     def test_load_workflow_handler(self) -> None:
         """Test case for load_workflow_handler

-        Retrieves an existing Workflow. # noqa: E501
+        Retrieves an existing Workflow.
         """
         pass

     def test_raster_stream_websocket(self) -> None:
         """Test case for raster_stream_websocket

-        Query a workflow raster result as a stream of tiles via a websocket connection. # noqa: E501
+        Query a workflow raster result as a stream of tiles via a websocket connection.
         """
         pass

     def test_register_workflow_handler(self) -> None:
         """Test case for register_workflow_handler

-        Registers a new Workflow. # noqa: E501
+        Registers a new Workflow.
         """
         pass
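The setUp stub builds WorkflowsApi() without arguments, i.e. against the client's default configuration. A rough sketch of an explicit setup, following the usual openapi-generator wiring of Configuration and ApiClient; the host URL is a placeholder and none of this is taken from the diff itself:

    import geoengine_openapi_client
    from geoengine_openapi_client.api.workflows_api import WorkflowsApi

    configuration = geoengine_openapi_client.Configuration(host='http://localhost:3030/api')
    with geoengine_openapi_client.ApiClient(configuration) as api_client:
        workflows = WorkflowsApi(api_client)
        # the handlers exercised by the test cases above (register, load, metadata, ...)
        # would be called on `workflows` here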
""" pass diff --git a/python/test/test_wrapped_plot_output.py b/python/test/test_wrapped_plot_output.py index 9ff6d690..a798da80 100644 --- a/python/test/test_wrapped_plot_output.py +++ b/python/test/test_wrapped_plot_output.py @@ -14,9 +14,8 @@ import unittest -import datetime -from geoengine_openapi_client.models.wrapped_plot_output import WrappedPlotOutput # noqa: E501 +from geoengine_openapi_client.models.wrapped_plot_output import WrappedPlotOutput class TestWrappedPlotOutput(unittest.TestCase): """WrappedPlotOutput unit test stubs""" @@ -29,12 +28,12 @@ def tearDown(self): def make_instance(self, include_optional) -> WrappedPlotOutput: """Test WrappedPlotOutput - include_option is a boolean, when False only required + include_optional is a boolean, when False only required params are included, when True both required and optional params are included """ # uncomment below to create an instance of `WrappedPlotOutput` """ - model = WrappedPlotOutput() # noqa: E501 + model = WrappedPlotOutput() if include_optional: return WrappedPlotOutput( data = None,