Skip to content

Commit

Permalink
♻️ Labels for metrics scraping (#3881)
Browse files Browse the repository at this point in the history
  • Loading branch information
GitHK authored Feb 20, 2023
1 parent 3b883ba commit 1da8dab
Show file tree
Hide file tree
Showing 11 changed files with 160 additions and 48 deletions.
31 changes: 30 additions & 1 deletion packages/models-library/src/models_library/docker.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
import re
from typing import Optional

from pydantic import ConstrainedStr, constr
from models_library.generated_models.docker_rest_api import Task
from models_library.projects import ProjectID
from models_library.projects_nodes import NodeID
from models_library.users import UserID
from pydantic import BaseModel, ConstrainedStr, Field, constr

from .basic_regex import (
DOCKER_GENERIC_TAG_KEY_RE,
Expand All @@ -23,3 +27,28 @@ class DockerLabelKey(ConstrainedStr):
class DockerGenericTag(ConstrainedStr):
# NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description
regex: Optional[re.Pattern[str]] = DOCKER_GENERIC_TAG_KEY_RE


class SimcoreServiceDockerLabelKeys(BaseModel):
    """Simcore identification labels attached to dynamic-service containers.

    Maps (user, project, node) identifiers to the legacy docker label names
    (``user_id``, ``study_id``, ``uuid``) used for metrics scraping.

    NOTE: aliases should eventually use io.simcore.service.* — see
    https://github.com/ITISFoundation/osparc-simcore/issues/3638
    """

    user_id: UserID = Field(..., alias="user_id")
    project_id: ProjectID = Field(..., alias="study_id")
    node_id: NodeID = Field(..., alias="uuid")

    def to_docker_labels(self) -> dict[str, str]:
        """returns a dictionary of strings as required by docker"""
        # docker requires label values to be strings
        return {key: f"{value}" for key, value in self.dict(by_alias=True).items()}

    @classmethod
    def from_docker_task(cls, docker_task: Task) -> "SimcoreServiceDockerLabelKeys":
        """Build an instance from the labels of a docker swarm task.

        Raises pydantic.ValidationError if the expected labels are missing.
        """
        assert docker_task.Spec # nosec
        assert docker_task.Spec.ContainerSpec # nosec
        labels = docker_task.Spec.ContainerSpec.Labels or {}
        return cls.parse_obj(labels)

    class Config:
        allow_population_by_field_name = True
25 changes: 24 additions & 1 deletion packages/models-library/tests/test_docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,14 @@
# pylint: disable=unused-argument
# pylint: disable=unused-variable


import pytest
from models_library.docker import DockerGenericTag, DockerLabelKey
from faker import Faker
from models_library.docker import (
DockerGenericTag,
DockerLabelKey,
SimcoreServiceDockerLabelKeys,
)
from pydantic import ValidationError, parse_obj_as


Expand Down Expand Up @@ -93,3 +99,20 @@ def test_docker_generic_tag(image_name: str, valid: bool):
else:
with pytest.raises(ValidationError):
parse_obj_as(DockerGenericTag, image_name)


@pytest.fixture
def osparc_docker_label_keys(
    faker: Faker,
) -> SimcoreServiceDockerLabelKeys:
    """Random but valid set of simcore docker label keys."""
    return SimcoreServiceDockerLabelKeys.parse_obj(
        {
            "user_id": faker.pyint(),
            "project_id": faker.uuid4(),
            "node_id": faker.uuid4(),
        }
    )


def test_osparc_docker_label_keys_to_docker_labels(
    osparc_docker_label_keys: SimcoreServiceDockerLabelKeys,
):
    labels = osparc_docker_label_keys.to_docker_labels()
    # docker requires every label value to be a string
    for value in labels.values():
        assert isinstance(value, str)
    # round-trip: the exported labels parse back into the model
    assert parse_obj_as(SimcoreServiceDockerLabelKeys, labels)
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,10 @@ class EC2InstancesSettings(BaseCustomSettings):
"disabled when set to 0. Uses 1st machine defined in EC2_INSTANCES_ALLOWED_TYPES",
)

EC2_INSTANCES_MAX_START_TIME: datetime.timedelta = Field(default=datetime.timedelta(minutes=3), description="Usual time taken an EC2 instance with the given AMI takes to be in 'running' mode")
EC2_INSTANCES_MAX_START_TIME: datetime.timedelta = Field(
default=datetime.timedelta(minutes=3),
description="Usual time taken an EC2 instance with the given AMI takes to be in 'running' mode",
)

@validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION")
@classmethod
Expand Down
32 changes: 2 additions & 30 deletions services/autoscaling/src/simcore_service_autoscaling/models.py
Original file line number Diff line number Diff line change
@@ -1,39 +1,11 @@
import datetime
from dataclasses import dataclass, field

from models_library.generated_models.docker_rest_api import Node, Task
from models_library.projects import ProjectID
from models_library.projects_nodes import NodeID
from models_library.users import UserID
from pydantic import BaseModel, ByteSize, Field, NonNegativeFloat, PositiveInt
from models_library.generated_models.docker_rest_api import Node
from pydantic import BaseModel, ByteSize, NonNegativeFloat, PositiveInt
from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType


class SimcoreServiceDockerLabelKeys(BaseModel):
    """Simcore identification labels (user/project/node) for docker containers.

    NOTE(review): this appears to duplicate the model of the same name in
    models_library.docker — confirm which copy is authoritative.
    """

    # NOTE: in a next PR, this should be moved to packages models-library and used
    # all over, and aliases should use io.simcore.service.*
    # https://github.com/ITISFoundation/osparc-simcore/issues/3638

    user_id: UserID = Field(..., alias="user_id")
    project_id: ProjectID = Field(..., alias="study_id")
    node_id: NodeID = Field(..., alias="uuid")

    def to_docker_labels(self) -> dict[str, str]:
        """returns a dictionary of strings as required by docker"""
        std_export = self.dict(by_alias=True)
        # docker label values must be strings
        return {k: f"{v}" for k, v in std_export.items()}

    @classmethod
    def from_docker_task(cls, docker_task: Task) -> "SimcoreServiceDockerLabelKeys":
        """Build an instance from the labels of a docker swarm task.

        Raises pydantic.ValidationError if the expected labels are missing.
        """
        assert docker_task.Spec # nosec
        assert docker_task.Spec.ContainerSpec # nosec
        task_labels = docker_task.Spec.ContainerSpec.Labels or {}
        return cls.parse_obj(task_labels)

    class Config:
        # allows construction both by field name and by alias
        allow_population_by_field_name = True


class Resources(BaseModel):
cpus: NonNegativeFloat
ram: ByteSize
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import logging

from fastapi import FastAPI
from models_library.docker import SimcoreServiceDockerLabelKeys
from models_library.generated_models.docker_rest_api import Task
from models_library.rabbitmq_messages import (
LoggerRabbitMessage,
Expand All @@ -13,7 +14,7 @@
from servicelib.logging_utils import log_catch

from ..core.settings import ApplicationSettings
from ..models import Cluster, SimcoreServiceDockerLabelKeys
from ..models import Cluster
from ..modules.docker import AutoscalingDocker, get_docker_client
from ..modules.rabbitmq import post_message
from . import utils_docker
Expand Down
3 changes: 1 addition & 2 deletions services/autoscaling/tests/unit/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from faker import Faker
from fakeredis.aioredis import FakeRedis
from fastapi import FastAPI
from models_library.docker import DockerLabelKey
from models_library.docker import DockerLabelKey, SimcoreServiceDockerLabelKeys
from models_library.generated_models.docker_rest_api import (
Availability,
Node,
Expand All @@ -50,7 +50,6 @@
from settings_library.rabbit import RabbitSettings
from simcore_service_autoscaling.core.application import create_app
from simcore_service_autoscaling.core.settings import ApplicationSettings, EC2Settings
from simcore_service_autoscaling.models import SimcoreServiceDockerLabelKeys
from simcore_service_autoscaling.modules.docker import AutoscalingDocker
from simcore_service_autoscaling.modules.ec2 import AutoscalingEC2, EC2InstanceData
from tenacity import retry
Expand Down
11 changes: 2 additions & 9 deletions services/autoscaling/tests/unit/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,10 @@

import aiodocker
import pytest
from models_library.docker import SimcoreServiceDockerLabelKeys
from models_library.generated_models.docker_rest_api import Service, Task
from pydantic import ByteSize, ValidationError, parse_obj_as
from simcore_service_autoscaling.models import Resources, SimcoreServiceDockerLabelKeys
from simcore_service_autoscaling.models import Resources


@pytest.mark.parametrize(
Expand Down Expand Up @@ -109,14 +110,6 @@ async def test_get_simcore_service_docker_labels_from_task_with_missing_labels_r
SimcoreServiceDockerLabelKeys.from_docker_task(service_tasks[0])


def test_osparc_docker_label_keys_to_docker_labels(
osparc_docker_label_keys: SimcoreServiceDockerLabelKeys,
):
exported_dict = osparc_docker_label_keys.to_docker_labels()
assert all(isinstance(v, str) for v in exported_dict.values())
assert parse_obj_as(SimcoreServiceDockerLabelKeys, exported_dict)


async def test_get_simcore_service_docker_labels(
async_docker_client: aiodocker.Docker,
create_service: Callable[[dict[str, Any], dict[str, str], str], Awaitable[Service]],
Expand Down
3 changes: 1 addition & 2 deletions services/autoscaling/tests/unit/test_utils_rabbitmq.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import aiodocker
from faker import Faker
from fastapi import FastAPI
from models_library.docker import DockerLabelKey
from models_library.docker import DockerLabelKey, SimcoreServiceDockerLabelKeys
from models_library.generated_models.docker_rest_api import Service, Task
from models_library.rabbitmq_messages import (
LoggerRabbitMessage,
Expand All @@ -20,7 +20,6 @@
from pytest_mock.plugin import MockerFixture
from servicelib.rabbitmq import RabbitMQClient
from settings_library.rabbit import RabbitSettings
from simcore_service_autoscaling.models import SimcoreServiceDockerLabelKeys
from simcore_service_autoscaling.utils.rabbitmq import (
post_task_log_message,
post_task_progress_message,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
from typing import Optional, Union

from fastapi.applications import FastAPI
from models_library.docker import SimcoreServiceDockerLabelKeys
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID
from models_library.service_settings_labels import (
ComposeSpecLabel,
PathMappingsLabel,
Expand Down Expand Up @@ -182,6 +185,25 @@ def _update_resource_limits_and_reservations(
spec["environment"] = environment


def _update_container_labels(
    service_spec: ComposeSpecLabel,
    user_id: UserID,
    project_id: ProjectID,
    node_id: NodeID,
) -> None:
    """Attaches simcore identification labels to every service in the compose spec.

    Mutates ``service_spec`` in place: each service's ``labels`` list gets the
    ``user_id``/``study_id``/``uuid`` entries (skipping already-present ones).
    """
    # the labels are identical for all services: build them once, not per service
    label_keys = SimcoreServiceDockerLabelKeys(
        user_id=user_id, study_id=project_id, uuid=node_id
    )
    docker_labels = [f"{k}={v}" for k, v in label_keys.to_docker_labels().items()]

    for spec in service_spec["services"].values():
        labels: list[str] = spec.setdefault("labels", [])
        for docker_label in docker_labels:
            if docker_label not in labels:
                labels.append(docker_label)

def assemble_spec(
*,
app: FastAPI,
Expand All @@ -197,6 +219,8 @@ def assemble_spec(
allow_internet_access: bool,
product_name: str,
user_id: UserID,
project_id: ProjectID,
node_id: NodeID,
) -> str:
"""
returns a docker-compose spec used by
Expand Down Expand Up @@ -255,8 +279,14 @@ def assemble_spec(
egress_proxy_settings=egress_proxy_settings,
)

_update_container_labels(
service_spec=service_spec,
user_id=user_id,
project_id=project_id,
node_id=node_id,
)

# TODO: will be used in next PR
assert user_id # nosec
assert product_name # nosec

stringified_service_spec = replace_env_vars_in_compose_spec(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,10 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None:
allow_internet_access=allow_internet_access,
product_name=scheduler_data.product_name,
user_id=scheduler_data.user_id,
project_id=scheduler_data.project_id,
node_id=scheduler_data.node_uuid,
)

logger.debug(
"Starting containers %s with compose-specs:\n%s",
scheduler_data.service_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,18 @@


from typing import Any
from uuid import uuid4

import pytest
import yaml
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID
from models_library.services_resources import (
DEFAULT_SINGLE_SERVICE_NAME,
ResourcesDict,
ServiceResourcesDict,
)
from models_library.users import UserID
from pydantic import parse_obj_as
from servicelib.resources import CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY
from simcore_service_director_v2.modules.dynamic_sidecar import docker_compose_specs
Expand Down Expand Up @@ -139,3 +143,59 @@ async def test_inject_resource_limits_and_reservations(
in spec["environment"]
)
assert f"{MEM_RESOURCE_LIMIT_KEY}={memory.limit}" in spec["environment"]


USER_ID: UserID = 1
PROJECT_ID: ProjectID = uuid4()
NODE_ID: NodeID = uuid4()

# labels expected on every service after _update_container_labels runs
_EXPECTED_LABELS: list[str] = [
    f"user_id={USER_ID}",
    f"study_id={PROJECT_ID}",
    f"uuid={NODE_ID}",
]


@pytest.mark.parametrize(
    "service_spec, expected_result",
    [
        pytest.param(
            {"services": {"service-1": {}}},
            {"services": {"service-1": {"labels": list(_EXPECTED_LABELS)}}},
            id="single_service",
        ),
        pytest.param(
            {"services": {"service-1": {}, "service-2": {}}},
            {
                "services": {
                    "service-1": {"labels": list(_EXPECTED_LABELS)},
                    "service-2": {"labels": list(_EXPECTED_LABELS)},
                }
            },
            id="multiple_services",
        ),
    ],
)
async def test_update_container_labels(
    service_spec: dict[str, Any], expected_result: dict[str, Any]
):
    docker_compose_specs._update_container_labels(
        service_spec, USER_ID, PROJECT_ID, NODE_ID
    )
    assert service_spec == expected_result

0 comments on commit 1da8dab

Please sign in to comment.