diff --git a/.gitignore b/.gitignore index 18b522c3a..47f31defc 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,5 @@ compliance_tool/aas_compliance_tool/version.py # ignore the content of the server storage server/storage/ +test.py +/storage/ diff --git a/discovery_server/Dockerfile b/discovery_server/Dockerfile new file mode 100644 index 000000000..e0c1f1f14 --- /dev/null +++ b/discovery_server/Dockerfile @@ -0,0 +1,50 @@ +FROM python:3.11-alpine + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV PYTHONPATH="${PYTHONPATH}:/app" + +# If we have more dependencies for the server it would make sense +# to refactor uswgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + apk del git bash + + +COPY discovery_server/uwsgi.ini /etc/uwsgi/ +COPY discovery_server/supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY discovery_server/stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY discovery_server/entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +ENV SETUPTOOLS_SCM_PRETEND_VERSION=1.0.0 + + +COPY ./discovery_server/requirements.txt . 
+RUN pip install --no-cache-dir -r requirements.txt + +COPY ./sdk /sdk +COPY ./server /app/server +COPY ./discovery_server/app /app + +WORKDIR /app +RUN pip install ../sdk + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] \ No newline at end of file diff --git a/discovery_server/README.md b/discovery_server/README.md new file mode 100644 index 000000000..5083f48a7 --- /dev/null +++ b/discovery_server/README.md @@ -0,0 +1,48 @@ +# Eclipse BaSyx Python SDK - Discovery Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. +It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Discovery Service stores and retrieves relations between AAS identifiers and asset identifiers. It acts as a lookup service for resolving asset-related queries to corresponding AAS. 
+ +## Features + +| Function | Description | Example URL | +|------------------------------------------|----------------------------------------------------------|-----------------------------------------------------------------------| +| **search_all_aas_ids_by_asset_link** | Find AAS identifiers by providing asset link values | `POST http://localhost:8084/api/v3.0/lookup/shellsByAssetLink` | +| **get_all_specific_asset_ids_by_aas_id** | Return specific asset ids associated with an AAS ID | `GET http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| **post_all_asset_links_by_id** | Register specific asset ids linked to an AAS | `POST http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| **delete_all_asset_links_by_id** | Delete all asset links associated with a specific AAS ID | `DELETE http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| + +## Configuration + +The service can be configured to use either: + +- **In-memory storage** (default): Temporary data storage that resets on service restart. +- **MongoDB storage**: Persistent backend storage using MongoDB. + +### Configuration via Environment Variables + +| Variable | Description | Default | +|------------------|--------------------------------------------|-----------------------------| +| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | +| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | +| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. +The container image can be built and run via: +```bash +docker compose up --build +``` +## Test + +Examples of asset links and specific asset IDs for testing purposes are provided as JSON files in the [storage](./storage) folder. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker](https://github.com/tiangolo/uwsgi-nginx-docker) repository. 
diff --git a/discovery_server/app/main.py b/discovery_server/app/main.py
new file mode 100644
index 000000000..0092e6918
--- /dev/null
+++ b/discovery_server/app/main.py
@@ -0,0 +1,27 @@
+import os
+import sys
+from server.app.interfaces.discovery import DiscoveryAPI, MongoDiscoveryStore, InMemoryDiscoveryStore
+
+storage_type = os.getenv("STORAGE_TYPE", "inmemory")
+base_path = os.getenv("API_BASE_PATH")
+
+wsgi_optparams = {}
+
+if base_path is not None:
+    wsgi_optparams["base_path"] = base_path
+
+if storage_type == "inmemory":
+    application = DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams)
+
+elif storage_type == "mongodb":
+    uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017")
+    dbname = os.getenv("MONGODB_DBNAME", "basyx_registry")
+
+    application = DiscoveryAPI(MongoDiscoveryStore(uri, dbname), **wsgi_optparams)
+
+else:
+    # Fail fast on a misconfigured STORAGE_TYPE; without this, `application`
+    # stays undefined and uwsgi (need-app = true) only reports a generic
+    # "no app loaded" failure later, hiding the real cause.
+    print(f"STORAGE_TYPE must be either inmemory or mongodb! Current value: {storage_type}",
+          file=sys.stderr)
+    sys.exit(1)
diff --git a/discovery_server/compose.yml b/discovery_server/compose.yml
new file mode 100644
index 000000000..56be002e1
--- /dev/null
+++ b/discovery_server/compose.yml
@@ -0,0 +1,9 @@
+services:
+  app:
+    build:
+      context: ..
+ dockerfile: discovery_server/Dockerfile + ports: + - "8084:80" + environment: + - STORAGE_TYPE=inmemory diff --git a/discovery_server/entrypoint.sh b/discovery_server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/discovery_server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! 
-d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/discovery_server/requirements.txt b/discovery_server/requirements.txt new file mode 100644 index 000000000..376baed5f --- /dev/null +++ b/discovery_server/requirements.txt @@ -0,0 +1,2 @@ +Werkzeug +pymongo diff --git a/discovery_server/stop-supervisor.sh b/discovery_server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/discovery_server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/discovery_server/storage/AssetIdsFullExample.json b/discovery_server/storage/AssetIdsFullExample.json new file mode 100644 index 000000000..720d106fc --- /dev/null +++ b/discovery_server/storage/AssetIdsFullExample.json @@ -0,0 +1,62 @@ +[ + { + "semanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "ud800;udbff3udbffUud800Bud800qudbffhudbffTd6^dnTudbff5?Aoudbff36Xud800>udbffUudbff\"Hjeud800Fudbff;udbffC?5q]udbff8aIudbffkp[?sud800kXljub;Gudbffqud8003ud8005udbff[>Z6d_udbffO=hxs R9<_pudbffo" + } + ], + "referredSemanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "ooOud800pqudbfffud800b:4udbffiudbffudbffd_ud800sJudbffOudbffiB:udbff@pEudbffM;8ud800mS;udbff3ud800q8udbff^udbffmDhFttgudbffrudbffhudbffrEud800e" + } + ] + } + } + ], + "name": "ud800Vud800?ud800tudbff1Ah_ud8003udbffZud800d5WAud800ScMIud800e>", + "value": "udbffBudbffSud800udbffn%ud800kudbffa:Tcfudbff?udbff?ud8005udbffZudbff_ud800iud800qq.@Zud800jmludbffFB<:Wfud800=audbffludbffailudbff?ud800uLudbff7ud800GJqG'ud800kudbffrudbff>>RudbffQudbff=udbffQS]UudbffOZS", + 
"externalSubjectId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "^7<\\agVu_%ud800:pD<-ud800j9udbffkiKCudbffVudbffjudbffDudbffiudbffZsud800WhLG:tQfLP" + } + ], + "referredSemanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "]Pud800DudbffY[0Y", + "value": "udbffBudbffSud800udbffn%ud800kudbffa:Tcfudbff?udbff?ud8005udbffZudbff_ud800iud800qq.@Zud800jmludbffFB<:Wfud800=audbffludbffailudbff?ud800uLudbff7ud800GJqG'ud800kudbffrudbff>>RudbffQudbff=udbffQS]UudbffOZS" + } +] \ No newline at end of file diff --git a/discovery_server/supervisord.ini b/discovery_server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/discovery_server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/discovery_server/uwsgi.ini b/discovery_server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/discovery_server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false diff --git a/registry_server/Dockerfile 
b/registry_server/Dockerfile new file mode 100644 index 000000000..344ddd0ee --- /dev/null +++ b/registry_server/Dockerfile @@ -0,0 +1,49 @@ +FROM python:3.11-alpine + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# If we have more dependencies for the server it would make sense +# to refactor uswgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + apk del git bash + + +COPY registry_server/uwsgi.ini /etc/uwsgi/ +COPY registry_server/supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY registry_server/stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY registry_server/entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +ENV SETUPTOOLS_SCM_PRETEND_VERSION=1.0.0 + + +COPY ./registry_server/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY ./sdk /sdk +COPY ./server /server +COPY ./registry_server/app /app + +WORKDIR /app +RUN pip install ../sdk + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] \ No newline at end of file diff --git a/registry_server/README.md b/registry_server/README.md new file mode 100644 index 000000000..6c62e5068 --- /dev/null +++ b/registry_server/README.md @@ -0,0 +1,66 @@ +# Eclipse BaSyx Python SDK - Registry Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Registry Service**. 
+It provides basic registry functionality for AAS and submodels descriptors, as specified in the official [Asset Administration Shell Registry Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRegistryServiceSpecification/V3.1.0_SSP-001) and [Submodel Registry Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRegistryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Registry Service provides the endpoint for a given AAS-ID or Submodel-ID. Such an endpoint for an AAS and the related Submodel-IDs make the AAS and the submodels with their submodelElements accessible. + + + +## Features +# AAS Registry: +| Function | Description | Example URL | +|--------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------| +| **GetAllAssetAdministrationShellDescriptors** | Return all AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors` | +| **GetAssetAdministrationShellDescriptorById** | Return a specific AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **PostAssetAdministrationShellDescriptor** | Register/create a new AAS descriptor | `POST http://localhost:8083/api/v3.0/shell-descriptors` | +| **PutAssetAdministrationShellDescriptorById** | Update an existing AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **DeleteAssetAdministrationShellDescriptorById** | Delete an AAS descriptor by ID | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **GetSubmodelDescriptorsThroughSuperPath** | Return all submodel descriptors under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | +| 
**PostSubmodelDescriptorThroughSuperPath** | Register/create a new submodel descriptor under AAS descriptor | `POST http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | +| **GetSubmodelDescriptorThroughSuperPath** | Return a specific submodel descriptor under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **PutSubmodelDescriptorThroughSuperPath** | Update a specific submodel descriptor under AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **DeleteSubmodelDescriptorThroughSuperPath** | Delete a specific submodel descriptor under AAS descriptor | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **GetDescription** | Return the self‑description of the AAS registry service | `GET http://localhost:8083/api/v3.0/description` | + +# Submodel Registry: +| Function | Description | Example URL | +|----------------------------------|--------------------------------------------------------------|-----------------------------------------------------------------------------------| +| **GetAllSubmodelDescriptors** | Return all submodel descriptors | `GET http://localhost:8083/api/v3.0/submodel-descriptors` | +| **PostSubmodelDescriptor** | Register/create a new submodel descriptor | `POST http://localhost:8083/api/v3.0/submodel-descriptors` | +| **GetSubmodelDescriptorById** | Return a specific submodel descriptor | `GET http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **PutSubmodelDescriptorById** | Update a specific submodel descriptor | `PUT http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **DeleteSubmodelDescriptorById** | Delete a specific submodel descriptor | `DELETE 
http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **GetDescription** | Return the self‑description of the submodel registry service | `GET http://localhost:8083/api/v3.0/description` | + + + +## Configuration + +The container can be configured via environment variables: + +- `API_BASE_PATH` determines the base path under which all other API paths are made available. Default: `/api/v3.0` +- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: + - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve JSON files from the storage directory. The files are not modified, all changes done via the API are only stored in memory. + - When instead set to `LOCAL_FILE_BACKEND`, the server makes use of the [LocalFileBackend](https://github.com/eclipse-basyx/basyx-python-sdk/tree/main/backend/basyx_backend/local_file), where AAS and Submodels descriptors are persistently stored as JSON files. +- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. + + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. +The container image can be built and run via: +```bash +docker compose up --build +``` + +## Test + +An example descriptor for testing purposes is provided as a JSON file in the [storage](./storage) folder. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker](https://github.com/tiangolo/uwsgi-nginx-docker) repository. 
+
diff --git a/registry_server/app/main.py b/registry_server/app/main.py
new file mode 100644
index 000000000..b182118ee
--- /dev/null
+++ b/registry_server/app/main.py
@@ -0,0 +1,33 @@
+import sys
+import os
+sys.path.insert(0, "/")
+from basyx.aas.backend.local_file import LocalFileObjectStore
+from basyx.aas import model
+from server.app.interfaces.registry import RegistryAPI
+
+storage_path = os.getenv("STORAGE_PATH", "/storage")
+storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY")
+base_path = os.getenv("API_BASE_PATH")
+
+wsgi_optparams = {}
+
+if base_path is not None:
+    wsgi_optparams["base_path"] = base_path
+
+if storage_type == "LOCAL_FILE_BACKEND":
+    application = RegistryAPI(LocalFileObjectStore(storage_path), **wsgi_optparams)
+
+elif storage_type == "LOCAL_FILE_READ_ONLY":
+    # NOTE(review): the README says read-only mode serves the JSON files from
+    # STORAGE_PATH, but the store starts empty here — confirm whether the
+    # descriptors should be pre-loaded from storage_path on startup.
+    object_store: model.DictObjectStore = model.DictObjectStore()
+
+    application = RegistryAPI(object_store, **wsgi_optparams)
+
+else:
+    # Fail fast on a misconfigured STORAGE_TYPE; without this, `application`
+    # stays undefined and uwsgi (need-app = true) only reports a generic
+    # "no app loaded" failure later, hiding the real cause.
+    print(f"STORAGE_TYPE must be either LOCAL_FILE_BACKEND or LOCAL_FILE_READ_ONLY! "
+          f"Current value: {storage_type}", file=sys.stderr)
+    sys.exit(1)
diff --git a/registry_server/compose.yml b/registry_server/compose.yml
new file mode 100644
index 000000000..99f3e3cc9
--- /dev/null
+++ b/registry_server/compose.yml
@@ -0,0 +1,9 @@
+services:
+  app:
+    build:
+      context: ..
+ dockerfile: registry_server/Dockerfile + ports: + - "8083:80" + volumes: + - ./storage:/storage diff --git a/registry_server/entrypoint.sh b/registry_server/entrypoint.sh new file mode 100644 index 000000000..522d4fca2 --- /dev/null +++ b/registry_server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! 
-d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" \ No newline at end of file diff --git a/registry_server/requirements.txt b/registry_server/requirements.txt new file mode 100644 index 000000000..9c12eecda --- /dev/null +++ b/registry_server/requirements.txt @@ -0,0 +1 @@ +Werkzeug diff --git a/registry_server/stop-supervisor.sh b/registry_server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/registry_server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/registry_server/storage/descriptorCompleteExample.json b/registry_server/storage/descriptorCompleteExample.json new file mode 100644 index 000000000..f762aa836 --- /dev/null +++ b/registry_server/storage/descriptorCompleteExample.json @@ -0,0 +1,71 @@ +{ + "id": "https://example.org/aas/motor", + "endpoints": [ + { + "protocolInformation": { + "href": "https://localhost:1234/api/v3.0/aas", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ] + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "opc.tcp://localhost:4840" + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "https://localhost:5678", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ], + "subprotocol": "OPC UA Basic SOAP", + "subprotocolBody": "ns=2;s=MyAAS", + "subprotocolBodyEncoding": "application/soap+xml" + }, + "interface": "AAS-3.0" + } + ], + "submodelDescriptors":[ + { + "id": "https://admin-shell.io/zvei/nameplate/1/0/Nameplate", + "endpoints": [ + { + "href": { + "href": "https://localhost:1234/api/v3.0/submodel", + "endpointProtocol": 
"HTTP", + "endpointProtocolVersion": [ + "1.1" + ] + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "opc.tcp://localhost:4840" + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "https://localhost:5678", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ], + "subprotocol": "OPC UA Basic SOAP", + "subprotocolBody": "ns=2;s=MyAAS", + "subprotocolBodyEncoding": "application/soap+xml" + }, + "interface": "AAS-3.0" + } + ] + } + + ] +} \ No newline at end of file diff --git a/registry_server/supervisord.ini b/registry_server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/registry_server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/registry_server/uwsgi.ini b/registry_server/uwsgi.ini new file mode 100644 index 000000000..f333b2299 --- /dev/null +++ b/registry_server/uwsgi.ini @@ -0,0 +1,10 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false +logto = /tmp/uwsgi.log diff --git a/sdk/basyx/aas/adapter/_generic.py b/sdk/basyx/aas/adapter/_generic.py index 
79c98fc8c..65d14d8d3 100644 --- a/sdk/basyx/aas/adapter/_generic.py +++ b/sdk/basyx/aas/adapter/_generic.py @@ -19,6 +19,13 @@ PathOrBinaryIO = Union[Path, BinaryIO] PathOrIO = Union[Path, IO] # IO is TextIO or BinaryIO +# JSON top-level keys and their corresponding model classes +JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES = ( + ('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription), +) + # XML Namespace definition XML_NS_MAP = {"aas": "https://admin-shell.io/aas/3/0"} XML_NS_AAS = "{" + XML_NS_MAP["aas"] + "}" diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 78e3713f5..cd7ce9fb0 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,12 +34,13 @@ import json import logging import pprint -from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args +from typing import (Dict, Callable, ContextManager, TypeVar, Type, + List, IO, Optional, Set, get_args, Tuple, Iterable, Any) from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ IEC61360_DATA_TYPES_INVERSE, IEC61360_LEVEL_TYPES_INVERSE, KEY_TYPES_CLASSES_INVERSE, REFERENCE_TYPES_INVERSE, \ - DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path + DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES logger = logging.getLogger(__name__) @@ -154,19 +155,20 @@ def __init__(self, *args, **kwargs): json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs) @classmethod - def object_hook(cls, dct: Dict[str, object]) -> object: - # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). 
Otherwise, the JSON - # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. - if 'modelType' not in dct: - return dct + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + """ + Returns the dictionary of AAS class parsers. + + The following dict specifies a constructor method for all AAS classes that may be identified using the + ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON + representation of an object and tries to construct a Python object from it. Embedded objects that have a + modelType themselves are expected to be converted to the correct PythonType already. Additionally, each + function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects + instead of raising an Exception. - # The following dict specifies a constructor method for all AAS classes that may be identified using the - # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON - # representation of an object and tries to construct a Python object from it. Embedded objects that have a - # modelType themselves are expected to be converted to the correct PythonType already. Additionally, each - # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects - # instead of raising an Exception. 
- AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { + :return: The dictionary of AAS class parsers + """ + aas_class_parsers: Dict[str, Callable[[Dict[str, object]], object]] = { 'AssetAdministrationShell': cls._construct_asset_administration_shell, 'AssetInformation': cls._construct_asset_information, 'SpecificAssetId': cls._construct_specific_asset_id, @@ -189,6 +191,16 @@ def object_hook(cls, dct: Dict[str, object]) -> object: 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, } + return aas_class_parsers + + @classmethod + def object_hook(cls, dct: Dict[str, object]) -> object: + # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON + # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. + if 'modelType' not in dct: + return dct + + AAS_CLASS_PARSERS = cls._get_aas_class_parsers() # Get modelType and constructor function if not isinstance(dct['modelType'], str): @@ -799,7 +811,9 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + decoder: Optional[Type[AASFromJsonDecoder]] = None, + keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. @@ -817,6 +831,7 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 This parameter is ignored if a decoder class is specified. 
:param decoder: The decoder class used to decode the JSON objects + :param keys_to_types: A dictionary of JSON keys to expected types. This is used to check the type of the objects :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both ``replace_existing`` and ``ignore_existing`` set to ``False`` @@ -843,45 +858,43 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO with cm as fp: data = json.load(fp, cls=decoder_) - for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription)): + for name, expected_type in keys_to_types: try: lst = _get_ts(data, name, list) except (KeyError, TypeError): continue for item in lst: - error_message = "Expected a {} in list '{}', but found {}".format( - expected_type.__name__, name, repr(item)) + error_msg = f"Expected a {expected_type.__name__} in list '{name}', but found {repr(item)}." if isinstance(item, model.Identifiable): if not isinstance(item, expected_type): - if decoder_.failsafe: - logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) - else: - raise TypeError(error_message) + if not decoder_.failsafe: + raise TypeError(f"{item} was in the wrong list '{name}'") + logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") + if item.id in ret: - error_message = f"{item} has a duplicate identifier already parsed in the document!" + error_msg = f"{item} has a duplicate identifier already parsed in the document!" 
if not decoder_.failsafe: - raise KeyError(error_message) - logger.error(error_message + " skipping it...") + raise KeyError(error_msg) + logger.error(f"{error_msg} Skipping it...") continue + existing_element = object_store.get(item.id) if existing_element is not None: if not replace_existing: - error_message = f"object with identifier {item.id} already exists " \ - f"in the object store: {existing_element}!" + error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" if not ignore_existing: - raise KeyError(error_message + f" failed to insert {item}!") - logger.info(error_message + f" skipping insertion of {item}...") + raise KeyError(f"{error_msg} Failed to insert {item}!") + logger.info(f"{error_msg} Skipping {item}...") continue object_store.discard(existing_element) + object_store.add(item) ret.add(item.id) elif decoder_.failsafe: - logger.error(error_message) + logger.error(f"{error_msg} Skipping it...") else: - raise TypeError(error_message) + raise TypeError(error_msg) return ret diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index f7d6626eb..024226d97 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -30,11 +30,12 @@ import contextlib import inspect import io -from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args +from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args, Iterable, Tuple import json from basyx.aas import model from .. 
import _generic +from .._generic import JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES class AASToJsonEncoder(json.JSONEncoder): @@ -57,6 +58,40 @@ class AASToJsonEncoder(json.JSONEncoder): """ stripped = False + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + mapping: Dict[Type, Callable] = { + model.AdministrativeInformation: cls._administrative_information_to_json, + model.AnnotatedRelationshipElement: cls._annotated_relationship_element_to_json, + model.AssetAdministrationShell: cls._asset_administration_shell_to_json, + model.AssetInformation: cls._asset_information_to_json, + model.BasicEventElement: cls._basic_event_element_to_json, + model.Blob: cls._blob_to_json, + model.Capability: cls._capability_to_json, + model.ConceptDescription: cls._concept_description_to_json, + model.DataSpecificationIEC61360: cls._data_specification_iec61360_to_json, + model.Entity: cls._entity_to_json, + model.Extension: cls._extension_to_json, + model.File: cls._file_to_json, + model.Key: cls._key_to_json, + model.LangStringSet: cls._lang_string_set_to_json, + model.MultiLanguageProperty: cls._multi_language_property_to_json, + model.Operation: cls._operation_to_json, + model.Property: cls._property_to_json, + model.Qualifier: cls._qualifier_to_json, + model.Range: cls._range_to_json, + model.Reference: cls._reference_to_json, + model.ReferenceElement: cls._reference_element_to_json, + model.RelationshipElement: cls._relationship_element_to_json, + model.Resource: cls._resource_to_json, + model.SpecificAssetId: cls._specific_asset_id_to_json, + model.Submodel: cls._submodel_to_json, + model.SubmodelElementCollection: cls._submodel_element_collection_to_json, + model.SubmodelElementList: cls._submodel_element_list_to_json, + model.ValueReferencePair: cls._value_reference_pair_to_json, + } + return mapping + def default(self, obj: object) -> object: """ The overwritten ``default`` method for :class:`json.JSONEncoder` @@ -64,36 +99,7 @@ def default(self, obj: 
object) -> object: :param obj: The object to serialize to json :return: The serialized object """ - mapping: Dict[Type, Callable] = { - model.AdministrativeInformation: self._administrative_information_to_json, - model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, - model.AssetAdministrationShell: self._asset_administration_shell_to_json, - model.AssetInformation: self._asset_information_to_json, - model.BasicEventElement: self._basic_event_element_to_json, - model.Blob: self._blob_to_json, - model.Capability: self._capability_to_json, - model.ConceptDescription: self._concept_description_to_json, - model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, - model.Entity: self._entity_to_json, - model.Extension: self._extension_to_json, - model.File: self._file_to_json, - model.Key: self._key_to_json, - model.LangStringSet: self._lang_string_set_to_json, - model.MultiLanguageProperty: self._multi_language_property_to_json, - model.Operation: self._operation_to_json, - model.Property: self._property_to_json, - model.Qualifier: self._qualifier_to_json, - model.Range: self._range_to_json, - model.Reference: self._reference_to_json, - model.ReferenceElement: self._reference_element_to_json, - model.RelationshipElement: self._relationship_element_to_json, - model.Resource: self._resource_to_json, - model.SpecificAssetId: self._specific_asset_id_to_json, - model.Submodel: self._submodel_to_json, - model.SubmodelElementCollection: self._submodel_element_collection_to_json, - model.SubmodelElementList: self._submodel_element_list_to_json, - model.ValueReferencePair: self._value_reference_pair_to_json, - } + mapping = self._get_aas_class_serializers() for typ in mapping: if isinstance(obj, typ): mapping_method = mapping[typ] @@ -693,26 +699,34 @@ def _select_encoder(stripped: bool, encoder: Optional[Type[AASToJsonEncoder]] = return AASToJsonEncoder if not stripped else StrippedAASToJsonEncoder -def _create_dict(data: 
model.AbstractObjectStore) -> dict: - # separate different kind of objects - asset_administration_shells: List[model.AssetAdministrationShell] = [] - submodels: List[model.Submodel] = [] - concept_descriptions: List[model.ConceptDescription] = [] +def _create_dict(data: model.AbstractObjectStore, + keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Dict[str, List[model.Identifiable]]: + """ + Categorizes objects from an AbstractObjectStore into a dictionary based on their types. + + This function iterates over the objects in the provided AbstractObjectStore and groups them into lists + based on their types, as defined in the `keys_to_types` mapping. The resulting dictionary contains + keys corresponding to the names in `keys_to_types` and values as lists of objects of the respective types. + + :param data: An AbstractObjectStore containing objects to be categorized. + :param keys_to_types: An iterable of tuples where each tuple contains: + - A string key representing the category name. + - A type to match objects against. + :return: A dictionary where keys are category names and values are lists of objects of the corresponding types. 
+ """ + objects: Dict[str, List[model.Identifiable]] = {} + for obj in data: - if isinstance(obj, model.AssetAdministrationShell): - asset_administration_shells.append(obj) - elif isinstance(obj, model.Submodel): - submodels.append(obj) - elif isinstance(obj, model.ConceptDescription): - concept_descriptions.append(obj) - dict_: Dict[str, List] = {} - if asset_administration_shells: - dict_['assetAdministrationShells'] = asset_administration_shells - if submodels: - dict_['submodels'] = submodels - if concept_descriptions: - dict_['conceptDescriptions'] = concept_descriptions - return dict_ + # Iterate through the mapping of category names to expected types + for name, expected_type in keys_to_types: + # Check if the object matches the expected type + if isinstance(obj, expected_type): + # Add the object to the appropriate category in the dictionary + objects.setdefault(name, []) + objects[name].append(obj) + break # Exit the inner loop once a match is found + return objects def object_store_to_json(data: model.AbstractObjectStore, stripped: bool = False, diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index 3a9fb6832..dae376df8 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -38,9 +38,7 @@ requires-python = ">=3.9" dependencies = [ "lxml>=4.2,<5", "python-dateutil>=2.8,<3", - "pyecma376-2>=1.0.1", - "urllib3>=1.26,<3", - "Werkzeug>=3.0.3,<4", + "pyecma376-2>=1.0.1" ] [project.optional-dependencies] diff --git a/sdk/test/adapter/json/test_json_deserialization.py b/sdk/test/adapter/json/test_json_deserialization.py index 9272bdf98..0dba6dbdb 100644 --- a/sdk/test/adapter/json/test_json_deserialization.py +++ b/sdk/test/adapter/json/test_json_deserialization.py @@ -37,7 +37,8 @@ def test_file_format_wrong_list(self) -> None: } ] }""" - with self.assertRaisesRegex(TypeError, r"submodels.*AssetAdministrationShell"): + with self.assertRaisesRegex(TypeError, r"AssetAdministrationShell.* was " + r"in the wrong list 'submodels'"): 
read_aas_json_file(io.StringIO(data), failsafe=False) with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm: read_aas_json_file(io.StringIO(data), failsafe=True) @@ -196,7 +197,7 @@ def get_clean_store() -> model.DictObjectStore: with self.assertLogs(logging.getLogger(), level=logging.INFO) as log_ctx: identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=True) self.assertEqual(len(identifiers), 0) - self.assertIn("already exists in the object store", log_ctx.output[0]) # type: ignore + self.assertIn("already exists in store", log_ctx.output[0]) # type: ignore submodel = object_store.pop() self.assertIsInstance(submodel, model.Submodel) self.assertEqual(submodel.id_short, "test123") @@ -204,7 +205,7 @@ def get_clean_store() -> model.DictObjectStore: string_io.seek(0) object_store = get_clean_store() - with self.assertRaisesRegex(KeyError, r"already exists in the object store"): + with self.assertRaisesRegex(KeyError, r"already exists in store"): identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=False) self.assertEqual(len(identifiers), 0) diff --git a/server/app/__init__.py b/server/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/adapter/__init__.py b/server/app/adapter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py new file mode 100644 index 000000000..e3e21f463 --- /dev/null +++ b/server/app/adapter/jsonization.py @@ -0,0 +1,334 @@ +from typing import Dict, Set, Optional, Type + +import server.app.model as server_model +from basyx.aas import model +from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO, ASSET_KIND, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES +from basyx.aas.adapter.json import AASToJsonEncoder +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, \ + 
read_aas_json_file_into + +import logging +from typing import Callable + +logger = logging.getLogger(__name__) + +JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', server_model.SubmodelDescriptor) +) + + +class ServerAASFromJsonDecoder(AASFromJsonDecoder): + @classmethod + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + aas_class_parsers = super()._get_aas_class_parsers() + aas_class_parsers.update({ + 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, + 'SubmodelDescriptor': cls._construct_submodel_descriptor, + 'AssetLink': cls._construct_asset_link, + 'ProtocolInformation': cls._construct_protocol_information, + 'Endpoint': cls._construct_endpoint + }) + return aas_class_parsers + + # ################################################################################################## + # Utility Methods used in constructor methods to add general attributes (from abstract base classes) + # ################################################################################################## + + @classmethod + def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: + super()._amend_abstract_attributes(obj, dct) + + if isinstance(obj, server_model.Descriptor): + if 'description' in dct: + obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), + model.MultiLanguageTextType) + if 'displayName' in dct: + obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), + model.MultiLanguageNameType) + if 'extensions' in dct: + for extension in _get_ts(dct, 'extensions', list): + obj.extension.add(cls._construct_extension(extension)) + + @classmethod + def _construct_asset_administration_shell_descriptor( + cls, dct: Dict[str, object], + 
object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str)) + cls._amend_abstract_attributes(ret, dct) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) + if 'assetKind' in dct: + ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] + if 'assetType' in dct: + ret.asset_type = _get_ts(dct, 'assetType', str) + global_asset_id = None + if 'globalAssetId' in dct: + ret.global_asset_id = _get_ts(dct, 'globalAssetId', str) + specific_asset_id = set() + if 'specificAssetIds' in dct: + for desc_data in _get_ts(dct, "specificAssetIds", list): + specific_asset_id.add(cls._construct_specific_asset_id(desc_data, model.SpecificAssetId)) + if 'endpoints' in dct: + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'submodelDescriptors' in dct: + for sm_dct in _get_ts(dct, 'submodelDescriptors', list): + ret.submodel_descriptors.append(cls._construct_submodel_descriptor( + sm_dct, + server_model.SubmodelDescriptor + )) + return ret + + @classmethod + def _construct_protocol_information(cls, dct: Dict[str, 
object], + object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: + ret = object_class( + href=_get_ts(dct, 'href', str), + endpoint_protocol=_get_ts(dct, 'endpointProtocol', + str) if 'endpointProtocol' in dct else None, + endpoint_protocol_version=_get_ts(dct, + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in dct else None, + subprotocol=_get_ts(dct, 'subprotocol', + str) if 'subprotocol' in dct else None, + subprotocol_body=_get_ts(dct, 'subprotocolBody', + str) if 'subprotocolBody' in dct else None, + subprotocol_body_encoding=_get_ts(dct, + 'subprotocolBodyEncoding', + str) if 'subprotocolBodyEncoding' in dct else None + ) + return ret + + @classmethod + def _construct_endpoint(cls, dct: Dict[str, object], + object_class=server_model.Endpoint) -> server_model.Endpoint: + ret = object_class( + protocol_information=cls._construct_protocol_information( + _get_ts(dct, 'protocolInformation', dict), + server_model.ProtocolInformation + ), + interface=_get_ts(dct, 'interface', + str) + ) + cls._amend_abstract_attributes(ret, dct) + return ret + + @classmethod + def _construct_submodel_descriptor( + cls, dct: Dict[str, object], + object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str), + endpoints=[]) + cls._amend_abstract_attributes(ret, dct) + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + 
endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information( + _get_ts(dct, 'administration', dict)) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'semanticId' in dct: + ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) + if 'supplementalSemanticIds' in dct: + for ref in _get_ts(dct, 'supplementalSemanticIds', list): + ret.supplemental_semantic_id.append(cls._construct_reference(ref)) + return ret + + @classmethod + def _construct_asset_link( + cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: + ret = object_class(name=_get_ts(dct, 'name', str), + value=_get_ts(dct, 'value', str)) + return ret + + +class ServerStrictAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + A strict version of the AASFromJsonDecoder class for deserializing Asset Administration Shell data from the + official JSON format + + This version has set ``failsafe = False``, which will lead to Exceptions raised for every missing attribute or wrong + object type. + """ + failsafe = False + + +class ServerStrippedAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + Decoder for stripped JSON objects. Used in the HTTP adapter. + """ + stripped = True + + +class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, ServerStrippedAASFromJsonDecoder): + """ + Non-failsafe decoder for stripped JSON objects. 
+ """ + pass + + +def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, + replace_existing: bool = False, + ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, + decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, + ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, + decoder=decoder, keys_to_types=JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES) + + +class ServerAASToJsonEncoder(AASToJsonEncoder): + + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + serializers = super()._get_aas_class_serializers() + serializers.update({ + server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, + server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, + server_model.Endpoint: cls._endpoint_to_json, + server_model.ProtocolInformation: cls._protocol_information_to_json, + server_model.AssetLink: cls._asset_link_to_json + }) + return serializers + + @classmethod + def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: + data: Dict[str, object] = super()._abstract_classes_to_json(obj) + if isinstance(obj, server_model.Descriptor): + if obj.description: + data['description'] = obj.description + if obj.display_name: + data['displayName'] = obj.display_name + if obj.extension: + data['extensions'] = list(obj.extension) + return data + + @classmethod + def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ + Dict[str, object]: + """ + serialization of an object from class AssetAdministrationShell to json + + :param obj: object of class AssetAdministrationShell + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data.update(cls._namespace_to_json(obj)) + data['id'] = 
obj.id + if obj.administration: + data['administration'] = obj.administration + if obj.asset_kind: + data['assetKind'] = ASSET_KIND[obj.asset_kind] + if obj.asset_type: + data['assetType'] = obj.asset_type + if obj.global_asset_id: + data['globalAssetId'] = obj.global_asset_id + if obj.specific_asset_id: + data['specificAssetIds'] = list(obj.specific_asset_id) + if obj.endpoints: + data['endpoints'] = list(obj.endpoints) + if obj.id_short: + data['idShort'] = obj.id_short + if obj.submodel_descriptors: + data['submodelDescriptors'] = list(obj.submodel_descriptors) + return data + + @classmethod + def _protocol_information_to_json(cls, + obj: server_model.ProtocolInformation) -> \ + Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + + data['href'] = obj.href + if obj.endpoint_protocol: + data['endpointProtocol'] = obj.endpoint_protocol + if obj.endpoint_protocol_version: + data['endpointProtocolVersion'] = obj.endpoint_protocol_version + if obj.subprotocol: + data['subprotocol'] = obj.subprotocol + if obj.subprotocol_body: + data['subprotocolBody'] = obj.subprotocol_body + if obj.subprotocol_body_encoding: + data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding + return data + + @classmethod + def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['protocolInformation'] = cls._protocol_information_to_json( + obj.protocol_information) + data['interface'] = obj.interface + return data + + @classmethod + def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> Dict[str, object]: + """ + serialization of an object from class Submodel to json + + :param obj: object of class Submodel + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data['id'] = obj.id + data['endpoints'] = [cls._endpoint_to_json(ep) for ep in + obj.endpoints] + if obj.id_short: + data['idShort'] = obj.id_short + if 
obj.administration: + data['administration'] = obj.administration + if obj.semantic_id: + data['semanticId'] = obj.semantic_id + if obj.supplemental_semantic_id: + data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) + return data + + @classmethod + def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['name'] = obj.name + data['value'] = obj.value + return data diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py new file mode 100644 index 000000000..04100051e --- /dev/null +++ b/server/app/interfaces/base.py @@ -0,0 +1,452 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +import abc +import datetime +import enum +import io +import itertools +import json +from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict, Callable, TypeVar + +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.utils +from lxml import etree +from werkzeug import Response, Request +from werkzeug.exceptions import NotFound, BadRequest +from werkzeug.routing import MapAdapter + +from basyx.aas import model +from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element +from basyx.aas.model import AbstractObjectStore +from server.app import model as server_model +from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, \ + ServerStrictStrippedAASFromJsonDecoder +from server.app.util.converters import base64url_decode + + +T = TypeVar("T") + + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + 
return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + 
root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = self.result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +class ResultToJsonEncoder(ServerAASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, 
object]: + return { + "success": result.success, + "messages": result.messages + } + + @classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: + return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True + + +class BaseWSGIApp: + url_map: werkzeug.routing.Map + + # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ + def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="1") + try: + limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Limit can not be negative, cursor must be positive!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) + try: + response_t = self.get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 
internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return self.http_exception_to_response(e, response_t) + + @staticmethod + def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + + @staticmethod + def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, + exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + + +class ObjectStoreWSGIApp(BaseWSGIApp): + object_store: AbstractObjectStore + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + obj.update() + yield obj + + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound(f"No {type_.__name__} with {identifier} found!") + identifiable.update() + return identifiable + 
+class HTTPApiDecoder:
+    """Decodes HTTP request bodies (JSON or XML) into BaSyx model objects."""
+    # The model types we know how to construct from XML (only the ones the server needs).
+    type_constructables_map = {
+        model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL,
+        model.AssetInformation: XMLConstructables.ASSET_INFORMATION,
+        model.ModelReference: XMLConstructables.MODEL_REFERENCE,
+        model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID,
+        model.Qualifier: XMLConstructables.QUALIFIER,
+        model.Submodel: XMLConstructables.SUBMODEL,
+        model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT,
+        model.Reference: XMLConstructables.REFERENCE,
+    }
+
+    @classmethod
+    def check_type_support(cls, type_: type):
+        """Raise TypeError if type_ can be constructed neither from XML nor from JSON."""
+        # Descriptor/AssetLink types are JSON-only server types, hence tolerated
+        # even though they are absent from the XML constructables map.
+        tolerated_types = (
+            server_model.AssetAdministrationShellDescriptor,
+            server_model.SubmodelDescriptor,
+            server_model.AssetLink,
+        )
+        if type_ not in cls.type_constructables_map and type_ not in tolerated_types:
+            raise TypeError(f"Parsing {type_} is not supported!")
+
+    @classmethod
+    def assert_type(cls, obj: object, type_: Type[T]) -> T:
+        """Return obj if it is an instance of type_, otherwise raise BadRequest (HTTP 400)."""
+        if not isinstance(obj, type_):
+            raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!")
+        return obj
+
+    @classmethod
+    def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]:
+        """
+        Parse a JSON document into a list of expect_type instances.
+
+        :param data: raw JSON text/bytes from the request body
+        :param expect_type: the model type every parsed element must have
+        :param stripped: use the stripped (reduced) JSON decoder variant
+        :param expect_single: the payload must be a single object, not an array
+        :raises BadRequest: on malformed JSON, wrong cardinality or constraint violations
+        """
+        cls.check_type_support(expect_type)
+        decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \
+            else ServerStrictAASFromJsonDecoder
+        try:
+            parsed = json.loads(data, cls=decoder)
+            if isinstance(parsed, list) and expect_single:
+                raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!")
+            if not isinstance(parsed, list) and not expect_single:
+                raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!")
+            parsed = [parsed] if not isinstance(parsed, list) else parsed
+
+            # TODO: the following is ugly, but necessary because references aren't self-identified objects
+            #  in the json schema
+            # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines
+            #  that automatically
+            mapping = {
+                model.ModelReference: decoder._construct_model_reference,  # type: ignore[assignment]
+                model.AssetInformation: decoder._construct_asset_information,  # type: ignore[assignment]
+                model.SpecificAssetId: decoder._construct_specific_asset_id,  # type: ignore[assignment]
+                model.Reference: decoder._construct_reference,  # type: ignore[assignment]
+                model.Qualifier: decoder._construct_qualifier,  # type: ignore[assignment]
+                server_model.AssetAdministrationShellDescriptor:
+                    decoder._construct_asset_administration_shell_descriptor,  # type: ignore[assignment]
+                server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor,  # type: ignore[assignment]
+                server_model.AssetLink: decoder._construct_asset_link,  # type: ignore[assignment]
+            }
+
+            constructor: Optional[Callable[..., T]] = mapping.get(expect_type)
+            args = []
+            if expect_type is model.ModelReference:
+                args.append(model.Submodel)
+
+            if constructor is not None:
+                # construct elements that aren't self-identified
+                return [constructor(obj, *args) for obj in parsed]
+
+        except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e:
+            raise BadRequest(str(e)) from e
+
+        return [cls.assert_type(obj, expect_type) for obj in parsed]
+
+    @classmethod
+    def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]:
+        """Like json_list(), but the payload is base64url-encoded first."""
+        data = base64url_decode(data)
+        return cls.json_list(data, expect_type, stripped, expect_single)
+
+    @classmethod
+    def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T:
+        """Parse a JSON document that must contain exactly one expect_type object."""
+        return cls.json_list(data, expect_type, stripped, True)[0]
+
+    @classmethod
+    def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T:
+        """Like json(), but the payload is base64url-encoded first."""
+        data = base64url_decode(data)
+        return cls.json_list(data, expect_type, stripped, True)[0]
+
+    @classmethod
+    def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T:
+        """Parse an XML document into a single expect_type instance.
+
+        :raises BadRequest: on syntax errors, constraint violations or a type mismatch
+        """
+        cls.check_type_support(expect_type)
+        try:
+            xml_data = io.BytesIO(data)
+            rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type],
+                                      stripped=stripped, failsafe=False)
+        except (KeyError, ValueError) as e:
+            # xml deserialization creates an error chain. since we only return one error, return the root cause
+            f: BaseException = e
+            while f.__cause__ is not None:
+                f = f.__cause__
+            raise BadRequest(str(f)) from e
+        except (etree.XMLSyntaxError, model.AASConstraintViolation) as e:
+            raise BadRequest(str(e)) from e
+        return cls.assert_type(rv, expect_type)
+
+    @classmethod
+    def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T:
+        """
+        Deserialize the request body into a single instance of the expected type.
+
+        TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent
+              running out of memory. but it doesn't state how to check the content length
+              also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json
+              schema
+              In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app,
+              which should limit the maximum content length.
+        """
+        valid_content_types = ("application/json", "application/xml", "text/xml")
+
+        if request.mimetype not in valid_content_types:
+            raise werkzeug.exceptions.UnsupportedMediaType(
+                f"Invalid content-type: {request.mimetype}! Supported types: "
+                + ", ".join(valid_content_types))
+
+        if request.mimetype == "application/json":
+            return cls.json(request.get_data(), expect_type, stripped)
+        return cls.xml(request.get_data(), expect_type, stripped)
+
+    @classmethod
+    def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]:
+        """
+        Deserialize the request body into a list of instances of the expected type.
+
+        A single JSON object (no array) is accepted as well and wrapped into a
+        one-element list, so the annotated return type List[T] always holds.
+        """
+        # TODO: Refactor this method and request_body to avoid code duplication
+        valid_content_types = ("application/json", "application/xml", "text/xml")
+
+        if request.mimetype not in valid_content_types:
+            raise werkzeug.exceptions.UnsupportedMediaType(
+                f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types)
+            )
+
+        if request.mimetype == "application/json":
+            raw_data = request.get_data()
+            try:
+                parsed = json.loads(raw_data)
+            except Exception as e:
+                raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}")
+            if isinstance(parsed, list):
+                # Convert every array element individually.
+                return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed]  # type: ignore
+            # Bugfix: a single JSON object must still yield List[T], not a bare T,
+            # otherwise callers iterating the result would iterate the object itself.
+            return [cls._convert_single_json_item(parsed, expect_type, stripped)]
+        # Bugfix: the XML branch must also honour the List[T] signature.
+        return [cls.xml(request.get_data(), expect_type, stripped)]
+
+    @classmethod
+    def _convert_single_json_item(cls, data: object, expect_type: Type[T], stripped: bool) -> T:
+        """
+        Convert a single, already-parsed JSON object (a Python dict) into an expect_type instance.
+        The dict is re-serialized to JSON bytes first so the regular json() decoding path applies.
+        """
+        json_bytes = json.dumps(data).encode("utf-8")
+        return cls.json(json_bytes, expect_type, stripped)
+
+
+def is_stripped_request(request: Request) -> bool:
+    """Return True if the client requested the stripped ('core') representation.
+
+    :raises BadRequest: on an unknown 'level' query parameter
+    :raises NotImplemented: the 'extent' parameter is not supported yet
+    """
+    level = request.args.get("level")
+    if level not in {"deep", "core", None}:
+        raise BadRequest(f"Level {level} is not a valid level!")
+    extent = request.args.get("extent")
+    if extent is not None:
+        raise werkzeug.exceptions.NotImplemented("The parameter extent is not yet implemented for this server!")
+    return level == "core"
diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py
new file mode 100644
index 000000000..4f456f789
--- /dev/null
+++ b/server/app/interfaces/discovery.py
@@ -0,0 +1,214 @@
+"""
+This module implements the Discovery interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'.
+"""
+
+import abc
+from typing import Dict, List, Set, Any
+
+import werkzeug.exceptions
+from pymongo import MongoClient
+from pymongo.collection import Collection
+from werkzeug.routing import Rule, Submount
+from werkzeug.wrappers import Request, Response
+
+from basyx.aas import model
+from server.app.util.converters import Base64URLConverter
+from server.app.interfaces.base import BaseWSGIApp, HTTPApiDecoder
+from ..
import model as server_model
+from ..adapter.jsonization import ServerAASToJsonEncoder
+
+# Shared encoder used to turn SpecificAssetId objects into plain dicts for storage.
+encoder = ServerAASToJsonEncoder()
+
+
+class AbstractDiscoveryStore(metaclass=abc.ABCMeta):
+    """Abstract storage backend for the discovery service.
+
+    Maintains two indices: AAS id -> asset ids (forward) and asset id -> AAS ids (reverse).
+    """
+    # Concrete layout is backend-specific (dicts in memory, collections in MongoDB).
+    aas_id_to_asset_ids: Any
+    asset_id_to_aas_ids: Any
+
+    @abc.abstractmethod
+    def __init__(self):
+        pass
+
+    @abc.abstractmethod
+    def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]:
+        pass
+
+    @abc.abstractmethod
+    def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None:
+        pass
+
+    @abc.abstractmethod
+    def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None:
+        pass
+
+    @abc.abstractmethod
+    def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]:
+        pass
+
+    @abc.abstractmethod
+    def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None:
+        pass
+
+    @abc.abstractmethod
+    def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None:
+        pass
+
+
+class InMemoryDiscoveryStore(AbstractDiscoveryStore):
+    """Volatile in-process discovery store backed by two dicts."""
+
+    def __init__(self):
+        # Bugfix: the forward index consistently holds *lists* of serialized asset ids.
+        # (The original annotated Set[...] but appended to lists and defaulted to set().)
+        self.aas_id_to_asset_ids: Dict[model.Identifier, List[Any]] = {}
+        # Reverse index keys are "name:value" strings, not SpecificAssetId objects.
+        self.asset_id_to_aas_ids: Dict[str, Set[model.Identifier]] = {}
+
+    def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]:
+        # NOTE(review): the stored values are encoder-serialized dicts, not
+        # SpecificAssetId instances — annotation kept for interface compatibility.
+        return list(self.aas_id_to_asset_ids.get(aas_id, []))
+
+    def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier,
+                                      asset_ids: List[model.SpecificAssetId]) -> None:
+        # Store JSON-serializable representations; skip entries already present.
+        serialized_assets = [encoder.default(asset_id) for asset_id in asset_ids]
+        stored = self.aas_id_to_asset_ids.setdefault(aas_id, [])
+        for asset in serialized_assets:
+            if asset not in stored:
+                stored.append(asset)
+
+    def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None:
+        # pop() with default makes deletion of an unknown id a silent no-op,
+        # matching the original membership-check-then-del behavior.
+        self.aas_id_to_asset_ids.pop(aas_id, None)
+
+    def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]:
+        # The expected key is loop-invariant, so a direct dict lookup replaces the
+        # original linear scan over all reverse-index entries.
+        expected_key = f"{asset_link.name}:{asset_link.value}"
+        return list(self.asset_id_to_aas_ids.get(expected_key, set()))
+
+    def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None:
+        asset_key = f"{asset_id.name}:{asset_id.value}"
+        self.asset_id_to_aas_ids.setdefault(asset_key, set()).add(aas_id)
+
+    def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None:
+        asset_key = f"{asset_id.name}:{asset_id.value}"
+        if asset_key in self.asset_id_to_aas_ids:
+            self.asset_id_to_aas_ids[asset_key].discard(aas_id)
+
+
+class MongoDiscoveryStore(AbstractDiscoveryStore):
+    """Discovery store persisted in MongoDB (one collection per index direction)."""
+
+    def __init__(self,
+                 uri: str = "mongodb://localhost:27017",
+                 db_name: str = "basyx",
+                 coll_aas_to_assets: str = "aas_to_assets",
+                 coll_asset_to_aas: str = "asset_to_aas"):
+        self.client = MongoClient(uri)
+        self.db = self.client[db_name]
+        self.coll_aas_to_assets: Collection = self.db[coll_aas_to_assets]
+        self.coll_asset_to_aas: Collection = self.db[coll_asset_to_aas]
+        # Create an index for fast asset reverse lookups.
+        # NOTE(review): MongoDB always indexes "_id" implicitly; this call is
+        # redundant but harmless — kept for backwards compatibility.
+        self.coll_asset_to_aas.create_index("_id")
+
+    def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]:
+        # NOTE(review): returns the stored serialized dicts, not SpecificAssetId
+        # instances — annotation kept for interface compatibility.
+        doc = self.coll_aas_to_assets.find_one({"_id": aas_id})
+        return doc["asset_ids"] if doc and "asset_ids" in doc else []
+
+    def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None:
+        # Convert each SpecificAssetId using the serialization helper;
+        # $addToSet with upsert deduplicates and creates the document on demand.
+        serializable_assets = [encoder.default(asset_id) for asset_id in asset_ids]
+        self.coll_aas_to_assets.update_one(
+            {"_id": aas_id},
+            {"$addToSet": {"asset_ids": {"$each": serializable_assets}}},
+            upsert=True
+        )
+
+    def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None:
+        self.coll_aas_to_assets.delete_one({"_id": aas_id})
+
+    def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]:
+        # Query for documents whose 'name' and 'value' fields match the asset link.
+        doc = self.coll_asset_to_aas.find_one({
+            "name": asset_link.name,
+            "value": asset_link.value
+        })
+        return doc["aas_ids"] if doc and "aas_ids" in doc else []
+
+    def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None:
+        asset_key = str(encoder.default(asset_id))
+        self.coll_asset_to_aas.update_one(
+            {"_id": asset_key},
+            # Bugfix: also persist 'name' and 'value' — search_aas_ids_by_asset_link
+            # queries exactly these fields, but they were never written, so every
+            # reverse lookup came back empty.
+            {"$addToSet": {"aas_ids": aas_id},
+             "$set": {"name": asset_id.name, "value": asset_id.value}},
+            upsert=True
+        )
+
+    def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None:
+        asset_key = str(encoder.default(asset_id))
+        self.coll_asset_to_aas.update_one(
+            {"_id": asset_key},
+            {"$pull": {"aas_ids": aas_id}}
+        )
+
+
+class DiscoveryAPI(BaseWSGIApp):
+    """WSGI app exposing the Basic Discovery Service endpoints (SSP-001)."""
+
+    def __init__(self,
+                 persistent_store: AbstractDiscoveryStore, base_path: str = "/api/v3.0"):
+        self.persistent_store: AbstractDiscoveryStore = persistent_store
+        self.url_map = werkzeug.routing.Map([
+            Submount(base_path, [
+                Rule("/lookup/shellsByAssetLink", methods=["POST"],
+                     endpoint=self.search_all_aas_ids_by_asset_link),
+                Submount("/lookup/shells", [
+                    # Bugfix: the handlers read url_args["aas_id"], so the rules must
+                    # declare the <base64url:aas_id> path parameter; with the original
+                    # bare "/" rules the identifier was always None.
+                    Rule("/<base64url:aas_id>", methods=["GET"],
+                         endpoint=self.get_all_specific_asset_ids_by_aas_id),
+                    Rule("/<base64url:aas_id>", methods=["POST"],
+                         endpoint=self.post_all_asset_links_by_id),
+                    Rule("/<base64url:aas_id>", methods=["DELETE"],
+                         endpoint=self.delete_all_asset_links_by_id),
+                ]),
+            ])
+        ], converters={
+            "base64url": Base64URLConverter
+        }, strict_slashes=False)
+
+    def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type,
+                                         **_kwargs) -> Response:
+        """POST /lookup/shellsByAssetLink: return AAS ids matching any supplied asset link."""
+        asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False)
+        matching_aas_keys: set = set()
+        for asset_link in asset_links:
+            matching_aas_keys.update(self.persistent_store.search_aas_ids_by_asset_link(asset_link))
+        paginated_slice, cursor = self._get_slice(request, list(matching_aas_keys))
+        return response_t(list(paginated_slice), cursor=cursor)
+
+    def get_all_specific_asset_ids_by_aas_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response:
+        """GET /lookup/shells/{aasIdentifier}: list all asset ids linked to the AAS."""
+        aas_identifier = url_args.get("aas_id")
+        asset_ids = self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier)
+        return response_t(asset_ids)
+
+    def post_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response:
+        """POST /lookup/shells/{aasIdentifier}: register asset ids for the AAS."""
+        aas_identifier = url_args.get("aas_id")
+        specific_asset_ids = HTTPApiDecoder.request_body_list(request, model.SpecificAssetId, False)
+        self.persistent_store.add_specific_asset_ids_to_aas(aas_identifier, specific_asset_ids)
+        # Keep the reverse index in sync.
+        # NOTE(review): calling a private store method from here leaks the store's
+        # internals; consider folding this into add_specific_asset_ids_to_aas.
+        for asset_id in specific_asset_ids:
+            self.persistent_store._add_aas_id_to_specific_asset_id(asset_id, aas_identifier)
+        updated = {aas_identifier: self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier)}
+        return response_t(updated)
+
+    def delete_all_asset_links_by_id(self, request: Request, url_args: dict,
+                                     response_t: type, **_kwargs) -> Response:
+        """DELETE /lookup/shells/{aasIdentifier}: drop all asset links of the AAS."""
+        aas_identifier = url_args.get("aas_id")
+        self.persistent_store.delete_specific_asset_ids_by_aas_id(aas_identifier)
+        # Bugfix: the original unconditionally accessed the in-memory reverse-index
+        # dict, which raises AttributeError for MongoDB-backed stores. Guarded here;
+        # NOTE(review): this cleanup should become a proper store method.
+        reverse_index = getattr(self.persistent_store, "asset_id_to_aas_ids", None)
+        if isinstance(reverse_index, dict):
+            for aas_ids in reverse_index.values():
+                aas_ids.discard(aas_identifier)
+        return response_t()
+
+
+if __name__ == "__main__":
+    from werkzeug.serving import run_simple
+
+    run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()),
+               use_debugger=True, use_reloader=True)
diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py
new file mode 100644
index 000000000..2d34bcbf6
--- /dev/null
+++ b/server/app/interfaces/registry.py
@@ -0,0 +1,290 @@
+"""
+This module implements the Registry interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'.
+"""
+
+from typing import Dict, Iterator, List, Type, Tuple
+
+import werkzeug.exceptions
+import werkzeug.routing
+import werkzeug.urls
+import werkzeug.utils
+from werkzeug.exceptions import Conflict, NotFound, BadRequest
+from werkzeug.routing import MapAdapter, Rule, Submount
+from werkzeug.wrappers import Request, Response
+
+import server.app.model as server_model
+from basyx.aas import model
+from server.app.util.converters import Base64URLConverter, base64url_decode
+from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder
+
+
+class RegistryAPI(ObjectStoreWSGIApp):
+    def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"):
+        self.object_store: model.AbstractObjectStore = object_store
+        self.url_map = werkzeug.routing.Map([
+            Submount(base_path, [
+                Rule("/description", methods=["GET"], endpoint=self.get_self_description),
+                Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_all_aas_descriptors),
+                Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor),
+                
Submount("/shell-descriptors", [
+                    # Bugfix: the by-id handlers read url_args["aas_id"] and
+                    # map_adapter.build() supplies an "aas_id" value, so the rules
+                    # must declare the <base64url:aas_id> path parameter (the
+                    # original bare "/" rules made every by-id request a KeyError).
+                    Rule("/<base64url:aas_id>", methods=["GET"], endpoint=self.get_aas_descriptor_by_id),
+                    Rule("/<base64url:aas_id>", methods=["PUT"], endpoint=self.put_aas_descriptor_by_id),
+                    Rule("/<base64url:aas_id>", methods=["DELETE"], endpoint=self.delete_aas_descriptor_by_id),
+                    Submount("/<base64url:aas_id>", [
+                        Rule("/submodel-descriptors", methods=["GET"],
+                             endpoint=self.get_all_submodel_descriptors_through_superpath),
+                        Rule("/submodel-descriptors", methods=["POST"],
+                             endpoint=self.post_submodel_descriptor_through_superpath),
+                        Submount("/submodel-descriptors", [
+                            Rule("/<base64url:submodel_id>", methods=["GET"],
+                                 endpoint=self.get_submodel_descriptor_by_id_through_superpath),
+                            Rule("/<base64url:submodel_id>", methods=["PUT"],
+                                 endpoint=self.put_submodel_descriptor_by_id_through_superpath),
+                            Rule("/<base64url:submodel_id>", methods=["DELETE"],
+                                 endpoint=self.delete_submodel_descriptor_by_id_through_superpath),
+                        ])
+                    ])
+                ]),
+                Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors),
+                Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor),
+                Submount("/submodel-descriptors", [
+                    # Same fix: declare the <base64url:submodel_id> path parameter.
+                    Rule("/<base64url:submodel_id>", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id),
+                    Rule("/<base64url:submodel_id>", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id),
+                    Rule("/<base64url:submodel_id>", methods=["DELETE"],
+                         endpoint=self.delete_submodel_descriptor_by_id),
+                ])
+            ])
+        ], converters={
+            "base64url": Base64URLConverter
+        }, strict_slashes=False)
+
+    def _get_all_aas_descriptors(self, request: "Request") -> Tuple[
+            Iterator[server_model.AssetAdministrationShellDescriptor], int]:
+        """Collect all AAS descriptors, apply assetKind/assetType filters and paginate."""
+        descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type(
+            server_model.AssetAdministrationShellDescriptor
+        )
+
+        asset_kind = request.args.get("assetKind")
+        if asset_kind is not None:
+            try:
+                asset_kind = model.AssetKind[asset_kind]
+            except KeyError:
+                raise BadRequest(f"Invalid assetKind '{asset_kind}', must be one of {list(model.AssetKind.__members__)}")
+            descriptors = filter(lambda desc: desc.asset_kind == asset_kind, descriptors)
+
+        asset_type = request.args.get("assetType")
+        if asset_type is not None:
+            # assetType arrives base64url-encoded per the API specification.
+            asset_type = base64url_decode(asset_type)
+            try:
+                asset_type = model.Identifier(asset_type)
+            except Exception:
+                raise BadRequest(f"Invalid assetType: '{asset_type}'")
+            descriptors = filter(lambda desc: desc.asset_type == asset_type, descriptors)
+
+        return self._get_slice(request, descriptors)
+
+    def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor:
+        """Resolve the aas_id path parameter to a stored descriptor (404 if absent)."""
+        return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor)
+
+    def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]:
+        """Collect all submodel descriptors and paginate them."""
+        submodel_descriptors: Iterator[server_model.SubmodelDescriptor] = \
+            self._get_all_obj_of_type(server_model.SubmodelDescriptor)
+        return self._get_slice(request, submodel_descriptors)
+
+    def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor:
+        """Resolve the submodel_id path parameter to a stored descriptor (404 if absent)."""
+        return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)
+
+    # ------ COMMON ROUTES -------
+    def get_self_description(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                             **_kwargs) -> Response:
+        """GET /description: advertise the implemented service specification profiles."""
+        service_description = server_model.ServiceDescription(profiles=[
+            server_model.ServiceSpecificationProfileEnum.AAS_REGISTRY_FULL,
+            server_model.ServiceSpecificationProfileEnum.AAS_REGISTRY_READ,
+            server_model.ServiceSpecificationProfileEnum.SUBMODEL_REGISTRY_FULL,
+            server_model.ServiceSpecificationProfileEnum.SUBMODEL_REGISTRY_READ
+        ])
+        return response_t(service_description.to_dict())
+
+    # ------ AAS REGISTRY ROUTES -------
+    def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                **_kwargs) -> Response:
+        aas_descriptors, cursor = self._get_all_aas_descriptors(request)
+        return response_t(list(aas_descriptors), cursor=cursor)
+
+    def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                            map_adapter: MapAdapter) -> Response:
+        """POST /shell-descriptors: register a new AAS descriptor (409 on duplicate id)."""
+        descriptor = HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False)
+        try:
+            self.object_store.add(descriptor)
+        except KeyError as e:
+            raise Conflict(f"AssetAdministrationShellDescriptor with Identifier {descriptor.id} already exists!") from e
+        descriptor.commit()
+        created_resource_url = map_adapter.build(self.get_aas_descriptor_by_id, {
+            "aas_id": descriptor.id
+        }, force_external=True)
+        return response_t(descriptor, status=201, headers={"Location": created_resource_url})
+
+    def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                 **_kwargs) -> Response:
+        return response_t(self._get_aas_descriptor(url_args))
+
+    def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                 **_kwargs) -> Response:
+        descriptor = self._get_aas_descriptor(url_args)
+        descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor,
+                                                           is_stripped_request(request)))
+        descriptor.commit()
+        return response_t()
+
+    def delete_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                    **_kwargs) -> Response:
+        self.object_store.remove(self._get_aas_descriptor(url_args))
+        return response_t()
+
+    def get_all_submodel_descriptors_through_superpath(self, request: Request, url_args: Dict,
+                                                       response_t: Type[APIResponse], **_kwargs) -> Response:
+        aas_descriptor = self._get_aas_descriptor(url_args)
+        submodel_descriptors, cursor = self._get_slice(request, aas_descriptor.submodel_descriptors)
+        return response_t(list(submodel_descriptors), cursor=cursor)
+
+    def get_submodel_descriptor_by_id_through_superpath(self, request: Request, url_args: Dict,
+                                                        response_t: Type[APIResponse], **_kwargs) -> Response:
+        aas_descriptor = self._get_aas_descriptor(url_args)
+        submodel_id = url_args["submodel_id"]
+        submodel_descriptor = next(
+            (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None)
+        if submodel_descriptor is None:
+            raise NotFound(
+                f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!")
+        return response_t(submodel_descriptor)
+
+    def post_submodel_descriptor_through_superpath(self, request: Request, url_args: Dict,
+                                                   response_t: Type[APIResponse],
+                                                   map_adapter: MapAdapter) -> Response:
+        """POST …/{aasId}/submodel-descriptors: attach a submodel descriptor to an AAS."""
+        aas_descriptor = self._get_aas_descriptor(url_args)
+        submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor,
+                                                          is_stripped_request(request))
+        if any(sd.id == submodel_descriptor.id for sd in aas_descriptor.submodel_descriptors):
+            raise Conflict(
+                f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!")
+        aas_descriptor.submodel_descriptors.append(submodel_descriptor)
+        aas_descriptor.commit()
+        created_resource_url = map_adapter.build(
+            self.get_submodel_descriptor_by_id_through_superpath, {
+                "aas_id": aas_descriptor.id,
+                "submodel_id": submodel_descriptor.id
+            }, force_external=True)
+        return response_t(submodel_descriptor, status=201,
+                          headers={"Location": created_resource_url})
+
+    def put_submodel_descriptor_by_id_through_superpath(self, request: Request, url_args: Dict,
+                                                        response_t: Type[APIResponse], **_kwargs) -> Response:
+        aas_descriptor = self._get_aas_descriptor(url_args)
+        submodel_id = url_args["submodel_id"]
+        submodel_descriptor = next(
+            (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None)
+        if submodel_descriptor is None:
+            raise NotFound(
+                f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!")
+        submodel_descriptor.update_from(
+            HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request)))
+        aas_descriptor.commit()
+        return response_t()
+
+    def delete_submodel_descriptor_by_id_through_superpath(self, request: Request, url_args: Dict,
+                                                           response_t: Type[APIResponse], **_kwargs) -> Response:
+        aas_descriptor = self._get_aas_descriptor(url_args)
+        submodel_id = url_args["submodel_id"]
+        submodel_descriptor = next(
+            (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None)
+        if submodel_descriptor is None:
+            raise NotFound(f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!")
+        aas_descriptor.submodel_descriptors.remove(submodel_descriptor)
+        aas_descriptor.commit()
+        return response_t()
+
+    # ------ Submodel REGISTRY ROUTES -------
+    def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                     **_kwargs) -> Response:
+        submodel_descriptors, cursor = self._get_all_submodel_descriptors(request)
+        return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request))
+
+    def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                      **_kwargs) -> Response:
+        submodel_descriptor = self._get_submodel_descriptor(url_args)
+        return response_t(submodel_descriptor, stripped=is_stripped_request(request))
+
+    def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                 map_adapter: MapAdapter) -> Response:
+        """POST /submodel-descriptors: register a new submodel descriptor (409 on duplicate id)."""
+        submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor,
+                                                          is_stripped_request(request))
+        try:
+            self.object_store.add(submodel_descriptor)
+        except KeyError as e:
+            raise Conflict(f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") from e
+        submodel_descriptor.commit()
+        created_resource_url = map_adapter.build(self.get_submodel_descriptor_by_id, {
+            "submodel_id": submodel_descriptor.id
+        }, force_external=True)
+        return response_t(submodel_descriptor, status=201, headers={"Location": created_resource_url})
+
+    def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                      **_kwargs) -> Response:
+        submodel_descriptor = self._get_submodel_descriptor(url_args)
+        submodel_descriptor.update_from(
+            HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request)))
+        submodel_descriptor.commit()
+        return response_t()
+
+    def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
+                                         **_kwargs) -> Response:
+        self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor))
+        return response_t()
+
+
+if __name__ == "__main__":
+    from werkzeug.serving import run_simple
+    from basyx.aas.examples.data.example_aas import create_full_example
+
+    run_simple("localhost", 8083, RegistryAPI(create_full_example()),
+               use_debugger=True, use_reloader=True)
diff --git a/sdk/basyx/aas/adapter/http.py b/server/app/interfaces/repository.py
similarity index 70%
rename from sdk/basyx/aas/adapter/http.py
rename to server/app/interfaces/repository.py
index 12bd533f3..2e78ded78 100644
--- a/sdk/basyx/aas/adapter/http.py
+++ b/server/app/interfaces/repository.py
@@ -34,403 +34,25 @@
 - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}/$value`
 """
-import abc
-import base64
-import binascii
-import datetime
-import enum
 import io
 import json
-import itertools
-import urllib
+from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional
-from lxml import etree
 import werkzeug.exceptions
 import werkzeug.routing
-import werkzeug.urls
 import werkzeug.utils
-from werkzeug.exceptions import BadRequest, Conflict, NotFound
-from werkzeug.routing import MapAdapter, 
Rule, Submount -from werkzeug.wrappers import Request, Response +from werkzeug import Response, Request from werkzeug.datastructures import FileStorage +from werkzeug.exceptions import NotFound, BadRequest, Conflict +from werkzeug.routing import Submount, Rule, MapAdapter from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . import aasx +from basyx.aas.adapter import aasx +from server.app.util.converters import Base64URLConverter, IdShortPathConverter, base64url_decode +from .base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder, T -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def __init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -class ResultToJsonEncoder(AASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } - - @classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - 
"messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, Result): - result_elem = result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - 
root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -def result_to_xml(result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - -def message_to_xml(message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = 
request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - - -def is_stripped_request(request: Request) -> bool: - level = request.args.get("level") - if level not in {"deep", "core", None}: - raise BadRequest(f"Level {level} is not a valid level!") - extent = request.args.get("extent") - if extent is not None: - raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") - return level == "core" - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - - -def base64url_decode(data: str) -> str: - try: - # If the requester omits the base64 padding, an exception will be raised. - # However, Python doesn't complain about too much padding, - # thus we simply always append two padding characters (==). 
- # See also: https://stackoverflow.com/a/49459036/4780052 - decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) - except binascii.Error: - raise BadRequest(f"Encoded data {data} is invalid base64url!") - except UnicodeDecodeError: - raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") - return decoded - - -def base64url_encode(data: str) -> str: - encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") - return encoded - - -class HTTPApiDecoder: - # these are the types we can construct (well, only the ones we need) - type_constructables_map = { - model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, - model.AssetInformation: XMLConstructables.ASSET_INFORMATION, - model.ModelReference: XMLConstructables.MODEL_REFERENCE, - model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, - model.Qualifier: XMLConstructables.QUALIFIER, - model.Submodel: XMLConstructables.SUBMODEL, - model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE - } - - @classmethod - def check_type_supportance(cls, type_: type): - if type_ not in cls.type_constructables_map: - raise TypeError(f"Parsing {type_} is not supported!") - - @classmethod - def assert_type(cls, obj: object, type_: Type[T]) -> T: - if not isinstance(obj, type_): - raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") - return obj - - @classmethod - def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_supportance(expect_type) - decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ - else StrictAASFromJsonDecoder - try: - parsed = json.loads(data, cls=decoder) - if not isinstance(parsed, list): - if not expect_single: - raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") - parsed = [parsed] - elif expect_single: - 
raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") - # TODO: the following is ugly, but necessary because references aren't self-identified objects - # in the json schema - # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines - # that automatically - constructor: Optional[Callable[..., T]] = None - args = [] - if expect_type is model.ModelReference: - constructor = decoder._construct_model_reference # type: ignore[assignment] - args.append(model.Submodel) - elif expect_type is model.AssetInformation: - constructor = decoder._construct_asset_information # type: ignore[assignment] - elif expect_type is model.SpecificAssetId: - constructor = decoder._construct_specific_asset_id # type: ignore[assignment] - elif expect_type is model.Reference: - constructor = decoder._construct_reference # type: ignore[assignment] - elif expect_type is model.Qualifier: - constructor = decoder._construct_qualifier # type: ignore[assignment] - - if constructor is not None: - # construct elements that aren't self-identified - return [constructor(obj, *args) for obj in parsed] - - except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - - return [cls.assert_type(obj, expect_type) for obj in parsed] - - @classmethod - def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, expect_single) - - @classmethod - def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def xml(cls, data: bytes, 
expect_type: Type[T], stripped: bool) -> T: - cls.check_type_supportance(expect_type) - try: - xml_data = io.BytesIO(data) - rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) - except (KeyError, ValueError) as e: - # xml deserialization creates an error chain. since we only return one error, return the root cause - f: BaseException = e - while f.__cause__ is not None: - f = f.__cause__ - raise BadRequest(str(f)) from e - except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - return cls.assert_type(rv, expect_type) - - @classmethod - def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent - running out of memory. but it doesn't state how to check the content length - also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json - schema - In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, - which should limit the maximum content length. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! 
Supported types: " - + ", ".join(valid_content_types)) - - if request.mimetype == "application/json": - return cls.json(request.get_data(), expect_type, stripped) - return cls.xml(request.get_data(), expect_type, stripped) - - -class Base64URLConverter(werkzeug.routing.UnicodeConverter): - - def to_url(self, value: model.Identifier) -> str: - return super().to_url(base64url_encode(value)) - - def to_python(self, value: str) -> model.Identifier: - value = super().to_python(value) - decoded = base64url_decode(super().to_python(value)) - return decoded - - -class IdShortPathConverter(werkzeug.routing.UnicodeConverter): - id_short_sep = "." - - def to_url(self, value: List[str]) -> str: - return super().to_url(self.id_short_sep.join(value)) - - def to_python(self, value: str) -> List[str]: - id_shorts = super().to_python(value).split(self.id_short_sep) - for id_short in id_shorts: - try: - model.Referable.validate_id_short(id_short) - except (ValueError, model.AASConstraintViolation): - raise BadRequest(f"{id_short} is not a valid id_short!") - return id_shorts - - -class WSGIApp: +class WSGIApp(ObjectStoreWSGIApp): def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.AbstractSupplementaryFileContainer, base_path: str = "/api/v3.0"): self.object_store: model.AbstractObjectStore = object_store @@ -488,8 +110,7 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/submodel-elements", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), Submount("/submodel-elements", [ - Rule("/$metadata", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_metadata), + Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_metadata), Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), @@ -525,10 +146,8 @@ def __init__(self, object_store: 
model.AbstractObjectStore, file_store: aasx.Abs Rule("/operation-status/", methods=["GET"], endpoint=self.not_implemented), Submount("/operation-results", [ - Rule("/", methods=["GET"], - endpoint=self.not_implemented), - Rule("//$value", methods=["GET"], - endpoint=self.not_implemented) + Rule("/", methods=["GET"], endpoint=self.not_implemented), + Rule("//$value", methods=["GET"], endpoint=self.not_implemented) ]), Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -544,10 +163,8 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs ]) ]) ]), - Rule("/qualifiers", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], - endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -571,24 +188,6 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs "id_short_path": IdShortPathConverter }, strict_slashes=False) - # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - - def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: - identifiable = self.object_store.get(identifier) - if not isinstance(identifiable, type_): - raise NotFound(f"No {type_.__name__} with {identifier} found!") - identifiable.update() - return identifiable - - def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: - for obj in self.object_store: - if 
isinstance(obj, type_): - obj.update() - yield obj - def _resolve_reference(self, reference: model.ModelReference[model.base._RT]) -> model.base._RT: try: return reference.resolve(self.object_store) @@ -651,21 +250,6 @@ def _get_submodel_reference(cls, aas: model.AssetAdministrationShell, submodel_i return ref raise NotFound(f"The AAS {aas!r} doesn't have a submodel reference to {submodel_id!r}!") - @classmethod - def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="1") - try: - limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Limit can not be negative, cursor must be positive!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - def _get_shells(self, request: Request) -> Tuple[Iterator[model.AssetAdministrationShell], int]: aas: Iterator[model.AssetAdministrationShell] = self._get_all_obj_of_type(model.AssetAdministrationShell) @@ -713,7 +297,7 @@ def _get_submodels(self, request: Request) -> Tuple[Iterator[model.Submodel], in submodels = filter(lambda sm: sm.id_short == id_short, submodels) semantic_id = request.args.get("semanticId") if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64urljson( + spec_semantic_id = HTTPApiDecoder.base64url_json( semantic_id, model.Reference, False) # type: ignore[type-abstract] submodels = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodels) paginated_submodels, end_index = self._get_slice(request, submodels) @@ -737,22 +321,6 @@ def _get_submodel_submodel_elements_id_short_path(self, url_args: Dict) -> model def _get_concept_description(self, url_args): return self._get_obj_ts(url_args["concept_id"], model.ConceptDescription) - def 
handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - # ------ all not implemented ROUTES ------- def not_implemented(self, request: Request, url_args: Dict, **_kwargs) -> Response: raise werkzeug.exceptions.NotImplemented("This route is not implemented!") diff --git a/server/app/main.py b/server/app/main.py index c502bfbe0..de9eea065 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -6,7 +6,7 @@ from basyx.aas.adapter import aasx from basyx.aas.backend.local_file import LocalFileObjectStore -from basyx.aas.adapter.http import WSGIApp +from server.app.interfaces.repository import WSGIApp storage_path = os.getenv("STORAGE_PATH", "/storage") storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") diff --git a/server/app/model/__init__.py b/server/app/model/__init__.py new file mode 100644 index 000000000..0f5e5d953 --- /dev/null +++ b/server/app/model/__init__.py @@ -0,0 +1,3 @@ +from .descriptor import * +from .endpoint import * +from .service import * diff --git a/server/app/model/descriptor.py b/server/app/model/descriptor.py new file mode 100644 index 000000000..38276cd2d --- /dev/null +++ b/server/app/model/descriptor.py @@ -0,0 +1,109 @@ +from __future__ import absolute_import + +import abc +from typing import Optional, Iterable, List + +from basyx.aas import model +from server.app.model.endpoint import Endpoint + + +class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): + @abc.abstractmethod + def 
__init__(self, description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, extension: Iterable[model.Extension] = ()): + super().__init__() + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = model.NamespaceSet(self, [("name", True)], extension) + + def commit(self): + pass + + def update(self): + pass + + def update_from(self, other: "Descriptor", update_source: bool = False): + """ + Updates the descriptor's attributes from another descriptor. + + :param other: The descriptor to update from. + :param update_source: Placeholder for compatibility; not used in this context. + """ + for attr in vars(other): + if attr == "id": + continue # Skip updating the unique identifier of the AAS + setattr(self, attr, getattr(other, attr)) + + +class SubmodelDescriptor(Descriptor): + + def __init__(self, id_: model.Identifier, endpoints: List[Endpoint], + administration: Optional[model.AdministrativeInformation] = None, + id_short: Optional[model.NameType] = None, semantic_id: Optional[model.Reference] = None, + supplemental_semantic_id: Iterable[model.Reference] = ()): + super().__init__() + self.id: model.Identifier = id_ + self.endpoints: List[Endpoint] = endpoints + self.administration: Optional[model.AdministrativeInformation] = administration + self.id_short: Optional[model.NameType] = id_short + self.semantic_id: Optional[model.Reference] = semantic_id + self.supplemental_semantic_id: model.ConstrainedList[model.Reference] = \ + model.ConstrainedList(supplemental_semantic_id) + + +class AssetAdministrationShellDescriptor(Descriptor): + + def __init__(self, + id_: model.Identifier, + administration: Optional[model.AdministrativeInformation] = None, + asset_kind: Optional[model.AssetKind] = None, + asset_type: Optional[model.Identifier] = None, + endpoints: Optional[List[Endpoint]] = None, + 
global_asset_id: Optional[model.Identifier] = None, + id_short: Optional[model.NameType] = None, + specific_asset_id: Iterable[model.SpecificAssetId] = (), + submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, + description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, + extension: Iterable[model.Extension] = ()): + """AssetAdministrationShellDescriptor - + + Nur das 'id'-Feld (id_) ist zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. + """ + super().__init__() + self.administration: Optional[model.AdministrativeInformation] = administration + self.asset_kind: Optional[model.AssetKind] = asset_kind + self.asset_type: Optional[model.Identifier] = asset_type + self.endpoints: Optional[ + List[Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt + self.global_asset_id: Optional[model.Identifier] = global_asset_id + self.id_short: Optional[model.NameType] = id_short + self.id: model.Identifier = id_ + self._specific_asset_id: model.ConstrainedList[model.SpecificAssetId] = model.ConstrainedList( + specific_asset_id, + item_set_hook=self._check_constraint_set_spec_asset_id, + item_del_hook=self._check_constraint_del_spec_asset_id + ) + self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = model.NamespaceSet(self, [("name", True)], extension) + + @property + def specific_asset_id(self) -> model.ConstrainedList[model.SpecificAssetId]: + return self._specific_asset_id + + @specific_asset_id.setter + def specific_asset_id(self, specific_asset_id: Iterable[model.SpecificAssetId]) -> None: + # constraints are checked via _check_constraint_set_spec_asset_id() in this case + self._specific_asset_id[:] = specific_asset_id + + def 
_check_constraint_set_spec_asset_id(self, items_to_replace: List[model.SpecificAssetId], + new_items: List[model.SpecificAssetId], + old_list: List[model.SpecificAssetId]) -> None: + model.AssetInformation._validate_aasd_131(self.global_asset_id, + len(old_list) - len(items_to_replace) + len(new_items) > 0) + + def _check_constraint_del_spec_asset_id(self, _item_to_del: model.SpecificAssetId, + old_list: List[model.SpecificAssetId]) -> None: + model.AssetInformation._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/server/app/model/endpoint.py b/server/app/model/endpoint.py new file mode 100644 index 000000000..3be6dc061 --- /dev/null +++ b/server/app/model/endpoint.py @@ -0,0 +1,107 @@ +from __future__ import absolute_import + +import re +from enum import Enum +from typing import Optional, List + +from basyx.aas.model import base + + +class AssetLink: + def __init__(self, name: base.LabelType, value: base.Identifier): + if not name: + raise ValueError("AssetLink 'name' must be a non-empty string.") + if not value: + raise ValueError("AssetLink 'value' must be a non-empty string.") + self.name = name + self.value = value + + +class SecurityTypeEnum(Enum): + NONE = "NONE" + RFC_TLSA = "RFC_TLSA" + W3C_DID = "W3C_DID" + + +class SecurityAttributeObject: + def __init__(self, type_: SecurityTypeEnum, key: str, value: str): + + if not isinstance(type_, SecurityTypeEnum): + raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") + if not key or not isinstance(key, str): + raise ValueError("Key must be a non-empty string.") + if not value or not isinstance(value, str): + raise ValueError("Value must be a non-empty string.") + self.type = type_ + self.key = key + self.value = value + + +class ProtocolInformation: + + def __init__( + self, + href: str, + endpoint_protocol: Optional[str] = None, + endpoint_protocol_version: Optional[List[str]] = None, + subprotocol: Optional[str] = None, + subprotocol_body: Optional[str] = None, + subprotocol_body_encoding: Optional[str] = None, + security_attributes: Optional[List[SecurityAttributeObject]] = None + ): + if not href or not isinstance(href, str): + raise ValueError("href must be a non-empty string representing a valid URL.") + + self.href = href + self.endpoint_protocol = endpoint_protocol + self.endpoint_protocol_version = endpoint_protocol_version or [] + self.subprotocol = subprotocol + self.subprotocol_body = subprotocol_body + self.subprotocol_body_encoding = subprotocol_body_encoding + self.security_attributes = security_attributes or [] + + +class Endpoint: + INTERFACE_SHORTNAMES = { + "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", + "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", + "CD-REPOSITORY", "AAS-DISCOVERY" + } + VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") + + def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 + + self.interface = interface + self.protocol_information = protocol_information + + @property + def interface(self) -> str: + return self._interface + + @interface.setter + def interface(self, interface: base.NameType): + if interface is None: + raise ValueError("Invalid value for `interface`, must not be `None`") + if not self.is_valid_interface(interface): + raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") + + self._interface = interface + + @classmethod + def is_valid_interface(cls, interface: base.NameType) -> bool: + parts = interface.split("-", 1) + if len(parts) != 2: + return False + short_name, version = parts + return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) + + @property + def protocol_information(self) -> ProtocolInformation: + return self._protocol_information + + @protocol_information.setter + def protocol_information(self, protocol_information: ProtocolInformation): + if protocol_information is None: + raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 + + self._protocol_information = protocol_information diff --git a/server/app/model/service.py b/server/app/model/service.py new file mode 100644 index 000000000..39bc3dc03 --- /dev/null +++ b/server/app/model/service.py @@ -0,0 +1,21 @@ +from typing import List +from enum import Enum + +class ServiceSpecificationProfileEnum(str, Enum): + AAS_REGISTRY_FULL = "https://adminshell.io/aas/API/3/0/AssetAdministrationShellRegistryServiceSpecification/SSP-001" + AAS_REGISTRY_READ = "https://adminshell.io/aas/API/3/0/AssetAdministrationShellRegistryServiceSpecification/SSP-002" + SUBMODEL_REGISTRY_FULL = "https://adminshell.io/aas/API/3/0/SubmodelRegistryServiceSpecification/SSP-001" + SUBMODEL_REGISTRY_READ = "https://adminshell.io/aas/API/3/0/SubmodelRegistryServiceSpecification/SSP-002" + #TODO add other profiles + + +class ServiceDescription: + def __init__(self, profiles: List[ServiceSpecificationProfileEnum]): + if not profiles: + raise ValueError("At least one profile must be specified") + self.profiles = profiles + + def to_dict(self): + return { + "profiles": [p.value for p in self.profiles] + } \ No newline at end of file diff --git a/server/app/py.typed b/server/app/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/util/__init__.py b/server/app/util/__init__.py new file 
mode 100644 index 000000000..e69de29bb diff --git a/server/app/util/converters.py b/server/app/util/converters.py new file mode 100644 index 000000000..c79ded3c2 --- /dev/null +++ b/server/app/util/converters.py @@ -0,0 +1,63 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +import base64 +import binascii + +import werkzeug.routing +import werkzeug.utils +from werkzeug.exceptions import BadRequest + +from basyx.aas import model + +from typing import List + +BASE64URL_ENCODING = "utf-8" + + +def base64url_decode(data: str) -> str: + try: + # If the requester omits the base64 padding, an exception will be raised. + # However, Python doesn't complain about too much padding, + # thus we simply always append two padding characters (==). + # See also: https://stackoverflow.com/a/49459036/4780052 + decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) + except binascii.Error: + raise BadRequest(f"Encoded data {data} is invalid base64url!") + except UnicodeDecodeError: + raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") + return decoded + + +def base64url_encode(data: str) -> str: + encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") + return encoded + + +class Base64URLConverter(werkzeug.routing.UnicodeConverter): + def to_url(self, value: model.Identifier) -> str: + return super().to_url(base64url_encode(value)) + + def to_python(self, value: str) -> model.Identifier: + value = super().to_python(value) + decoded = base64url_decode(super().to_python(value)) + return decoded + + +class IdShortPathConverter(werkzeug.routing.UnicodeConverter): + id_short_sep = "." 
+ + def to_url(self, value: List[str]) -> str: + return super().to_url(self.id_short_sep.join(value)) + + def to_python(self, value: str) -> List[str]: + id_shorts = super().to_python(value).split(self.id_short_sep) + for id_short in id_shorts: + try: + model.Referable.validate_id_short(id_short) + except (ValueError, model.AASConstraintViolation): + raise BadRequest(f"{id_short} is not a valid id_short!") + return id_shorts diff --git a/server/app/version.py b/server/app/version.py new file mode 100644 index 000000000..844ef354c --- /dev/null +++ b/server/app/version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '0.1.dev2109+g5f9e7d2' +__version_tuple__ = version_tuple = (0, 1, 'dev2109', 'g5f9e7d2') diff --git a/server/pyproject.toml b/server/pyproject.toml new file mode 100644 index 000000000..7a6af3106 --- /dev/null +++ b/server/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +requires = [ + "setuptools>=45", + "wheel", + "setuptools_scm[toml]>=6.2" +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# Configure setuptools_scm for version management: +# - Automatically infers the version number from the most recent git tag +# - Generates a version.py file in the package directory +# - Allows for automatic versioning between releases (e.g., 1.0.1.dev4+g12345) +# If you want to use the version anywhere in the code, use +# ``` +# from basyx.version import version +# print(f"Project version: {version}") +# ``` +root = ".." 
# Defines the path to the root of the repository +version_file = "app/version.py" + +[project] +name = "basyx-python-server" +dynamic = ["version"] +description = "The Eclipse BaSyx Python Server, an implementation of the BaSyx AAS Server" #FIXME +authors = [ + { name = "The Eclipse BaSyx Authors", email = "admins@iat.rwth-aachen.de" } +] +readme = "README.md" +license = { file = "LICENSE" } +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable" +] +requires-python = ">=3.9" +dependencies = [ + "basyx-python-sdk", #TODO: Think about the version + "urllib3>=1.26,<3", + "Werkzeug>=3.0.3,<4", +] + +[project.optional-dependencies] +dev = [ + "mypy", + "pycodestyle", + "codeblocks", + "coverage", + "schemathesis~=3.7", + "jsonschema~=4.7", + "hypothesis~=6.13", + "types-python-dateutil", +] + +[project.urls] +"Homepage" = "https://github.com/eclipse-basyx/basyx-python-sdk" + +[tool.setuptools] +packages = { find = { include = ["basyx*"], exclude = ["test*"] } } + +[tool.setuptools.package-data] +app = ["py.typed"] diff --git a/server/test/__init__.py b/server/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/test/interfaces/__init__.py b/server/test/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/sdk/test/adapter/test_http.py b/server/test/interfaces/test_repository.py similarity index 98% rename from sdk/test/adapter/test_http.py rename to server/test/interfaces/test_repository.py index 09dadf865..5177dfacb 100644 --- a/sdk/test/adapter/test_http.py +++ b/server/test/interfaces/test_repository.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors +# Copyright (c) 2025 the Eclipse BaSyx Authors # # This program and the accompanying materials are made available under the terms of the MIT License, available in # the LICENSE file of this project. 
@@ -34,7 +34,7 @@ from basyx.aas import model from basyx.aas.adapter.aasx import DictSupplementaryFileContainer -from basyx.aas.adapter.http import WSGIApp +from server.app.interfaces.repository import WSGIApp from basyx.aas.examples.data.example_aas import create_full_example from typing import Set