From cb4f983ad86ad3de1da95a8ea0e679373295f21a Mon Sep 17 00:00:00 2001 From: Gustavo Lima Chaves Date: Wed, 14 Apr 2021 15:09:09 -0700 Subject: [PATCH 1/3] =?UTF-8?q?=E2=9C=A8=20Make=20LocalNode=20and=20Remote?= =?UTF-8?q?Node=20extendable=20classes=20(yml-wise).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We do so by making use of BaseClassWithRunbookMixin on the actual instance classes and ExtendableSchemaMixin on the respective schema classes. This makes it possible to have custom, type-defined runbook sections for general (SUT) nodes, as is already possible for Platform and Notifier. This will come in handy for LISA users whose node logic/fields go beyond what was originally defined for platforms such as Azure. Example of usage: by declaring this in code: @dataclass_json() @dataclass class MyNodeSchema(schema.RemoteNode): type: str = field( default=MYNAME, metadata=schema.metadata( required=True, validate=validate.OneOf([MYNAME]), ), ) my_example_extra_field: Optional[str] = field(default=None) class MyNode(node.RemoteNode): def __init__( self, index: int, runbook: MyNodeSchema, logger_name: str, base_log_path: Optional[Path] = None, name: str = "", ) -> None: super().__init__(index, runbook, logger_name=logger_name, base_log_path=base_log_path, name=name) self.my_example_extra_field = runbook.my_example_extra_field assert self.my_example_extra_field, \ f"my_example_extra_field field of {MYNAME}-typed " \ "nodes cannot be empty " @classmethod def type_name(cls) -> str: return MYNAME @classmethod def type_schema(cls) -> Type[schema.TypedSchema]: return MyNodeSchema one is able to write this in the yml runbook: environment: warn_as_error: true environments: - nodes: - type: MYNAME public_address: ... public_port: ... username: ... password: ... -> my_example_extra_field: ... Custom logic for only that type of node is then at your fingertips, just by extending/overriding your node class. This is advanced usage, though, and not meant for the average user. Unit tests were added to help guard against regressions. --- examples/testsuites/withscript.py | 3 +- lisa/executable.py | 18 +- lisa/node.py | 374 +++++++++++++++--------------- lisa/runners/legacy_runner.py | 9 +- lisa/runners/lisa_runner.py | 3 +- lisa/schema.py | 105 +++++---- lisa/tests/test_environment.py | 162 +++++++++++-- lisa/tests/test_platform.py | 3 +- 8 files changed, 417 insertions(+), 260 deletions(-) diff --git a/examples/testsuites/withscript.py b/examples/testsuites/withscript.py index 6bed4ec4e3..baf4807ddf 100644 --- a/examples/testsuites/withscript.py +++ b/examples/testsuites/withscript.py @@ -8,6 +8,7 @@ from lisa import Node, TestCaseMetadata, TestSuite, TestSuiteMetadata from lisa.executable import CustomScript, CustomScriptBuilder +from lisa.node import is_remote from lisa.operating_system import Windows from lisa.testsuite import simple_requirement from lisa.util.perf_timer import create_timer @@ -44,7 +45,7 @@ def script(self, node: Node) -> None: timer2 = create_timer() result2 = script.run(force_run=True) assert_that(result1.stdout).is_equal_to(result2.stdout) - if node.is_remote: + if is_remote(node): # the timer will be significant different on a remote node.
assert_that( timer1.elapsed(), "the second time should be faster, without uploading" diff --git a/lisa/executable.py b/lisa/executable.py index 50cc738c89..1f30376d91 100644 --- a/lisa/executable.py +++ b/lisa/executable.py @@ -8,6 +8,7 @@ from hashlib import sha256 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, TypeVar, Union, cast +from lisa.schema import RemoteNode from lisa.util import InitializableMixin, LisaException, constants from lisa.util.logger import get_logger from lisa.util.perf_timer import create_timer @@ -20,6 +21,11 @@ T = TypeVar("T") +# circular dep if we use the helper in node.py, unfortunately +def _is_node_remote(node: Node) -> bool: + return isinstance(node.type_schema, RemoteNode) + + class Tool(ABC, InitializableMixin): """ The base class, which wraps an executable, package, or scripts on a node. @@ -246,10 +252,10 @@ def run( def get_tool_path(self) -> pathlib.PurePath: """ - compose a path, if the tool need to be installed + compose a path, if the tool needs to be installed """ - assert self.node.remote_working_path, "remote working path is not initialized" - return self.node.remote_working_path.joinpath(constants.PATH_TOOL, self.name) + assert self.node.working_path, "working path is not initialized" + return self.node.working_path.joinpath(constants.PATH_TOOL, self.name) def __call__( self, @@ -359,7 +365,7 @@ def dependencies(self) -> List[Type[Tool]]: return self._dependencies def install(self) -> bool: - if self.node.is_remote: + if _is_node_remote(self.node): # copy to remote node_script_path = self.get_tool_path() for file in self._files: @@ -495,7 +501,7 @@ def __getitem__(self, tool_type: Union[Type[T], CustomScriptBuilder, str]) -> T: is_success = tool.install() if not is_success: raise LisaException( - f"install '{tool.name}' failed. After installed, " + f"installing '{tool.name}' has failed. After installed, " f"it cannot be detected." ) tool_log.debug(f"installed in {timer}") @@ -503,7 +509,7 @@ def __getitem__(self, tool_type: Union[Type[T], CustomScriptBuilder, str]) -> T: raise LisaException( f"cannot find [{tool.name}] on [{self._node.name}], " f"{self._node.os.__class__.__name__}, " - f"Remote({self._node.is_remote}) " + f"Remote({_is_node_remote(self._node)}) " f"and installation of [{tool.name}] isn't enabled in lisa." 
) else: diff --git a/lisa/node.py b/lisa/node.py index 6d552cc330..306ff75f92 100644 --- a/lisa/node.py +++ b/lisa/node.py @@ -5,49 +5,34 @@ from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath from random import randint -from typing import Any, Iterable, List, Optional, TypeVar, Union, cast +from typing import Any, Iterable, List, Optional, Type, Union from lisa import schema from lisa.executable import Tools from lisa.feature import Features from lisa.operating_system import OperatingSystem from lisa.tools import Echo, Reboot -from lisa.util import ( - ContextMixin, - InitializableMixin, - LisaException, - constants, - fields_to_dict, -) +from lisa.util import ContextMixin, InitializableMixin, LisaException, constants from lisa.util.logger import get_logger from lisa.util.process import ExecutableResult, Process from lisa.util.shell import ConnectionInfo, LocalShell, Shell, SshShell +from lisa.util.subclasses import BaseClassWithRunbookMixin, Factory -T = TypeVar("T") - -class Node(ContextMixin, InitializableMixin): +class Node(BaseClassWithRunbookMixin, ContextMixin, InitializableMixin): def __init__( self, index: int, - capability: schema.NodeSpace, + runbook: schema.Node, logger_name: str, - is_remote: bool = True, - is_default: bool = False, base_log_path: Optional[Path] = None, ) -> None: - super().__init__() - self.is_default = is_default - self.is_remote = is_remote - self.capability = capability - self.name: str = "" + super().__init__(runbook=runbook) + self.is_default = runbook.is_default + self.capability = runbook.capability + self.name = runbook.name self.index = index - if self.is_remote: - self._shell: Optional[Shell] = None - else: - self._shell = LocalShell() - # will be initialized by platform self.features: Features self.tools = Tools(self) @@ -56,67 +41,26 @@ def __init__( self.log = get_logger(logger_name, node_id) # The working path will be created in remote node, when it's used. - self._remote_working_path: Optional[PurePath] = None + self._working_path: Optional[PurePath] = None self._base_local_log_path = base_log_path # Not to set the log path until its first used. Because the path # contains node name, which is not set in __init__. 
self._local_log_path: Optional[Path] = None self._support_sudo: Optional[bool] = None self._connection_info: Optional[ConnectionInfo] = None + self._shell: Optional[Union[LocalShell, SshShell]] = None + self.log.debug(f"adding new node {self.name}, is_default: {self.is_default}") - @staticmethod - def create( - index: int, - capability: schema.NodeSpace, - node_type: str = constants.ENVIRONMENTS_NODES_REMOTE, - is_default: bool = False, - logger_name: str = "node", - base_log_path: Optional[Path] = None, - ) -> Node: - if node_type == constants.ENVIRONMENTS_NODES_REMOTE: - is_remote = True - elif node_type == constants.ENVIRONMENTS_NODES_LOCAL: - is_remote = False - else: - raise LisaException(f"unsupported node_type '{node_type}'") - node = Node( - index, - capability=capability, - is_remote=is_remote, - is_default=is_default, - logger_name=logger_name, - base_log_path=base_log_path, - ) - node.log.debug(f"created, type: '{node_type}', default: {is_default}") - return node + def __str__(self) -> str: + raise NotImplementedError("base node class not meant to be used directly") - def set_connection_info( - self, - address: str = "", - port: int = 22, - public_address: str = "", - public_port: int = 22, - username: str = "root", - password: str = "", - private_key_file: str = "", - ) -> None: - if self._connection_info is not None: - raise LisaException( - "node is set connection information already, cannot set again" - ) + @classmethod + def type_schema(cls) -> Type[schema.TypedSchema]: + return schema.Node - self._connection_info = ConnectionInfo( - public_address, - public_port, - username, - password, - private_key_file, - ) - self._shell = SshShell(self._connection_info) - self.public_address = public_address - self.public_port = public_port - self.internal_address = address - self.internal_port = port + @property + def working_path(self) -> PurePath: + raise NotImplementedError("base node class not meant to be used directly") def reboot(self) -> None: self.tools[Reboot].reboot() @@ -154,7 +98,7 @@ def execute_async( if sudo and not self.support_sudo: raise LisaException( - f"node doesn't support [command] or [sudo], cannot execute: {cmd}" + f"{self} doesn't support [command] or [sudo], cannot execute: {cmd}" ) return self._execute( @@ -188,7 +132,9 @@ def support_sudo(self) -> bool: self._support_sudo = True else: self._support_sudo = False - self.log.debug("node doesn't support sudo, may cause failure later.") + self.log.debug( + f"{self} doesn't support sudo, it may cause failures later." + ) if self._support_sudo is None: # set Windows to true to ignore sudo asks. self._support_sudo = True @@ -223,35 +169,35 @@ def local_log_path(self) -> Path: return self._local_log_path - @property - def remote_working_path(self) -> PurePath: - if not self._remote_working_path: - if self.is_remote: - if self.is_posix: - remote_root_path = Path("$HOME") - else: - remote_root_path = Path("%TEMP%") - - working_path = remote_root_path.joinpath( - constants.PATH_REMOTE_ROOT, constants.RUN_LOGIC_PATH - ).as_posix() - - # expand environment variables in path - echo = self.tools[Echo] - result = echo.run(working_path, shell=True) - - # PurePath is more reasonable here, but spurplus doesn't support it. 
- if self.is_posix: - self._remote_working_path = PurePosixPath(result.stdout) - else: - self._remote_working_path = PureWindowsPath(result.stdout) - else: - self._remote_working_path = constants.RUN_LOCAL_PATH - - self.shell.mkdir(self._remote_working_path, parents=True, exist_ok=True) - self.log.debug(f"working path is: '{self._remote_working_path}'") + # FIXME: local nodes have a LocalShell, unless one sets connection + # info on them? Finish documenting that + def set_connection_info( + self, + address: str = "", + port: int = 22, + public_address: str = "", + public_port: int = 22, + username: str = "root", + password: str = "", + private_key_file: str = "", + ) -> None: + if self._connection_info is not None: + raise LisaException( + f"connection information for {self} is already set, cannot set again" + ) - return self._remote_working_path + self._connection_info = ConnectionInfo( + public_address, + public_port, + username, + password, + private_key_file, + ) + self._shell = SshShell(self._connection_info) + self.public_address = public_address + self.public_port = public_port + self.internal_address = address + self.internal_port = port def close(self) -> None: self.log.debug("closing node connection...") @@ -259,14 +205,7 @@ def close(self) -> None: self._shell.close() def _initialize(self, *args: Any, **kwargs: Any) -> None: - if self.is_remote: - assert ( - self._connection_info - ), "call setConnectionInfo before use remote node" - address = str(self._connection_info) - else: - address = "(local)" - self.log.info(f"initializing node '{self.name}' {address}") + self.log.info(f"initializing node {self}") self.shell.initialize() self.os: OperatingSystem = OperatingSystem.create(self) @@ -292,6 +231,112 @@ def _execute( return process +class LocalNode(Node): + def __init__( + self, + index: int, + runbook: schema.LocalNode, + logger_name: str, + base_log_path: Optional[Path] = None, + name: str = "", + ) -> None: + super().__init__(index, runbook, logger_name, base_log_path=base_log_path) + self._shell = LocalShell() + + @classmethod + def type_name(cls) -> str: + return constants.ENVIRONMENTS_NODES_LOCAL + + def __str__(self) -> str: + return f"{self.name if self.name else 'unnamed'}" + "[local]" + + @classmethod + def type_schema(cls) -> Type[schema.TypedSchema]: + return schema.LocalNode + + @property + def working_path(self) -> PurePath: + if self._working_path: + return self._working_path + + self._working_path = constants.RUN_LOCAL_PATH + + self.shell.mkdir(self._working_path, parents=True, exist_ok=True) + self.log.debug(f"working path is: '{self._working_path}'") + + return self._working_path + + +class RemoteNode(Node): + def __init__( + self, + index: int, + runbook: schema.RemoteNode, + logger_name: str, + base_log_path: Optional[Path] = None, + name: str = "", + with_conn_info: bool = True, + ) -> None: + super().__init__(index, runbook, logger_name, base_log_path=base_log_path) + if not with_conn_info: + return + self.set_connection_info( + public_address=runbook.public_address, + public_port=runbook.public_port, + username=runbook.username, + password=runbook.password, + private_key_file=runbook.private_key_file, + ) + + @classmethod + def type_name(cls) -> str: + return constants.ENVIRONMENTS_NODES_REMOTE + + def __str__(self) -> str: + return ( + f"{self.name if self.name else 'unnamed'}[remote]--{self._connection_info}" + ) + + def _initialize(self, *args: Any, **kwargs: Any) -> None: + assert ( + self._connection_info + ), "call set_connection_info before using a remote node" + super()._initialize(*args, **kwargs) + + 
@classmethod + def type_schema(cls) -> Type[schema.TypedSchema]: + return schema.RemoteNode + + @property + def working_path(self) -> PurePath: + if self._working_path: + return self._working_path + + if self.is_posix: + remote_root_path = Path("$HOME") + else: + remote_root_path = Path("%TEMP%") + + working_path = remote_root_path.joinpath( + constants.PATH_REMOTE_ROOT, constants.RUN_LOGIC_PATH + ).as_posix() + + # expand environment variables in path + echo = self.tools[Echo] + result = echo.run(working_path, shell=True) + + # PurePath is more reasonable here, but spurplus doesn't support it. + if self.is_posix: + self._working_path = PurePosixPath(result.stdout) + else: + self._working_path = PureWindowsPath(result.stdout) + + self.shell.mkdir(self._working_path, parents=True, exist_ok=True) + self.log.debug(f"working path is: '{self._working_path}'") + + return self._working_path + + class Nodes: def __init__(self) -> None: super().__init__() @@ -356,21 +401,14 @@ def from_existing( environment_name: str, base_log_path: Optional[Path] = None, ) -> Node: - if isinstance(node_runbook, schema.LocalNode): - node = self._from_local( - node_runbook, - environment_name=environment_name, - base_log_path=base_log_path, - ) - else: - assert isinstance( - node_runbook, schema.RemoteNode - ), f"actual: {type(node_runbook)}" - node = self._from_remote( - node_runbook, - environment_name=environment_name, - base_log_path=base_log_path, - ) + node: Node = Factory[Node](Node).create_by_type_name( + node_runbook.type, + len(self._list), + runbook=node_runbook, + logger_name=environment_name, + ) + + self._list.append(node) return node def from_requirement( @@ -378,78 +416,28 @@ def from_requirement( node_requirement: schema.NodeSpace, environment_name: str, base_log_path: Optional[Path] = None, - ) -> Node: - min_requirement = cast( - schema.NodeSpace, node_requirement.generate_min_capability(node_requirement) - ) - assert isinstance(min_requirement.node_count, int), ( + ) -> RemoteNode: + min_cap = node_requirement.generate_min_capability(node_requirement) + assert isinstance(min_cap.node_count, int), ( f"must be int after generate_min_capability, " - f"actual: {min_requirement.node_count}" + f"actual: {min_cap.node_count}" ) # node count should be expanded in platform already - assert min_requirement.node_count == 1, f"actual: {min_requirement.node_count}" - node = Node.create( - len(self._list), - capability=min_requirement, - node_type=constants.ENVIRONMENTS_NODES_REMOTE, - is_default=node_requirement.is_default, - logger_name=environment_name, - base_log_path=base_log_path, - ) - self._list.append(node) - return node + assert min_cap.node_count == 1, f"actual: {min_cap.node_count}" + sch = schema.RemoteNode(_ignore_conn=True) + sch.capability = min_cap + sch.is_default = min_cap.is_default - def _from_local( - self, - node_runbook: schema.LocalNode, - environment_name: str, - base_log_path: Optional[Path] = None, - ) -> Node: - assert isinstance( - node_runbook, schema.LocalNode - ), f"actual: {type(node_runbook)}" - node = Node.create( + node = RemoteNode( len(self._list), - capability=node_runbook.capability, - node_type=node_runbook.type, - is_default=node_runbook.is_default, - logger_name=environment_name, + sch, base_log_path=base_log_path, - ) - self._list.append(node) - - return node - - def _from_remote( - self, - node_runbook: schema.RemoteNode, - environment_name: str, - base_log_path: Optional[Path] = None, - ) -> Node: - assert isinstance( - node_runbook, schema.RemoteNode - ), 
f"actual: {type(node_runbook)}" - - node = Node.create( - len(self._list), - capability=node_runbook.capability, - node_type=node_runbook.type, - is_default=node_runbook.is_default, logger_name=environment_name, - base_log_path=base_log_path, + with_conn_info=False, ) self._list.append(node) + return node - fields = [ - constants.ENVIRONMENTS_NODES_REMOTE_ADDRESS, - constants.ENVIRONMENTS_NODES_REMOTE_PORT, - constants.ENVIRONMENTS_NODES_REMOTE_PUBLIC_ADDRESS, - constants.ENVIRONMENTS_NODES_REMOTE_PUBLIC_PORT, - constants.ENVIRONMENTS_NODES_REMOTE_USERNAME, - constants.ENVIRONMENTS_NODES_REMOTE_PASSWORD, - constants.ENVIRONMENTS_NODES_REMOTE_PRIVATE_KEY_FILE, - ] - parameters = fields_to_dict(node_runbook, fields) - node.set_connection_info(**parameters) - return node +def is_remote(node: Node) -> bool: + return isinstance(node, RemoteNode) diff --git a/lisa/runners/legacy_runner.py b/lisa/runners/legacy_runner.py index 7212c8a999..d7562a5cfa 100644 --- a/lisa/runners/legacy_runner.py +++ b/lisa/runners/legacy_runner.py @@ -14,7 +14,7 @@ from retry import retry from lisa import schema -from lisa.node import Node +from lisa.node import LocalNode from lisa.runner import BaseRunner from lisa.testsuite import TestCaseMetadata, TestCaseRuntimeData, TestResult, TestStatus from lisa.tools import Git @@ -71,11 +71,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) self.exit_code: int = 0 - # leverage Node logic to run local processes. - self._local = Node.create( + # leverage LocalNode logic to run local processes. + self._local = LocalNode( index=-1, - node_type=constants.ENVIRONMENTS_NODES_LOCAL, - capability=schema.NodeSpace(), + runbook=schema.LocalNode(), logger_name="LISAv2", ) self.canceled = False diff --git a/lisa/runners/lisa_runner.py b/lisa/runners/lisa_runner.py index b3b0f168d7..f03a586486 100644 --- a/lisa/runners/lisa_runner.py +++ b/lisa/runners/lisa_runner.py @@ -91,7 +91,8 @@ def _run(self, id_: str) -> List[TestResult]: if picked_result is None: self._log.debug( f"env[{environment.name}] skipped " - f"as not meet any case requirement" + f"as LISA has not met any case requirement match" + + " against test cases" ) continue diff --git a/lisa/schema.py b/lisa/schema.py index d1f6393515..2fda960a90 100644 --- a/lisa/schema.py +++ b/lisa/schema.py @@ -4,7 +4,18 @@ import copy from dataclasses import dataclass, field from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, cast +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Optional, + Type, + TypeVar, + Union, + cast, +) from dataclasses_json import ( CatchAll, @@ -573,9 +584,21 @@ def __post_init__(self, *args: Any, **kwargs: Any) -> None: self.node_count = 1 -@dataclass_json() +@dataclass_json(undefined=Undefined.INCLUDE) +@dataclass +class Node(TypedSchema, ExtendableSchemaMixin): + name: str = "" + type: str = field( + default="", + metadata=metadata(required=True), + ) + is_default: bool = field(default=False) + capability: Capability = field(default_factory=Capability) + + +@dataclass_json(undefined=Undefined.INCLUDE) @dataclass -class LocalNode(TypedSchema): +class LocalNode(Node): type: str = field( default=constants.ENVIRONMENTS_NODES_LOCAL, metadata=metadata( @@ -583,14 +606,11 @@ class LocalNode(TypedSchema): validate=validate.OneOf([constants.ENVIRONMENTS_NODES_LOCAL]), ), ) - name: str = "" - is_default: bool = field(default=False) - capability: Capability = field(default_factory=Capability) -@dataclass_json() 
+@dataclass_json(undefined=Undefined.INCLUDE) @dataclass -class RemoteNode(TypedSchema): +class RemoteNode(Node): type: str = field( default=constants.ENVIRONMENTS_NODES_REMOTE, metadata=metadata( @@ -598,8 +618,6 @@ class RemoteNode(TypedSchema): validate=validate.OneOf([constants.ENVIRONMENTS_NODES_REMOTE]), ), ) - name: str = "" - is_default: bool = field(default=False) address: str = "" port: int = field( default=22, metadata=metadata(validate=validate.Range(min=1, max=65535)) @@ -612,7 +630,10 @@ class RemoteNode(TypedSchema): username: str = field(default="", metadata=metadata(required=True)) password: str = "" private_key_file: str = "" - capability: Capability = field(default_factory=Capability) + + # FIXME: ideally this should have been an InitVar field, but it + # does not play well with Undefined.INCLUDE + _ignore_conn: bool = False def __post_init__(self, *args: Any, **kwargs: Any) -> None: add_secret(self.address) @@ -621,6 +642,9 @@ def __post_init__(self, *args: Any, **kwargs: Any) -> None: add_secret(self.password) add_secret(self.private_key_file) + if self._ignore_conn: + return + if not self.address and not self.public_address: raise LisaException( "at least one of address and public_address need to be set" @@ -658,36 +682,37 @@ class Environment: nodes_requirement: Optional[List[NodeSpace]] = None def __post_init__(self, *args: Any, **kwargs: Any) -> None: - self.nodes: Optional[List[Union[LocalNode, RemoteNode]]] = None - if self.nodes_raw is not None: - self.nodes = [] - for node_raw in self.nodes_raw: - node_type = node_raw[constants.TYPE] - if node_type == constants.ENVIRONMENTS_NODES_LOCAL: - node: Union[ - LocalNode, RemoteNode - ] = LocalNode.schema().load( # type:ignore - node_raw - ) - if self.nodes is None: - self.nodes = [] - self.nodes.append(node) - elif node_type == constants.ENVIRONMENTS_NODES_REMOTE: - node = RemoteNode.schema().load(node_raw) # type:ignore - if self.nodes is None: - self.nodes = [] - self.nodes.append(node) - elif node_type == constants.ENVIRONMENTS_NODES_REQUIREMENT: - original_req: NodeSpace = NodeSpace.schema().load( # type:ignore - node_raw - ) - expanded_req = original_req.expand_by_node_count() - if self.nodes_requirement is None: - self.nodes_requirement = [] - self.nodes_requirement.extend(expanded_req) + self.nodes: List[Union[LocalNode, RemoteNode]] = [] + if not self.nodes_raw: + return + nodes_final: List[Union[LocalNode, RemoteNode]] = [] + for node in self.nodes_raw: + node_type = node[constants.TYPE] + new_node: Union[LocalNode, RemoteNode] + if node_type == constants.ENVIRONMENTS_NODES_REQUIREMENT: + original_req: NodeSpace = NodeSpace.schema().load( # type:ignore + node + ) + expanded_req = original_req.expand_by_node_count() + if self.nodes_requirement is None: + self.nodes_requirement = [] + self.nodes_requirement.extend(expanded_req) + else: + # here we honor LocalNode, RemoteNode and all their extensions + subs: Dict[str, Type[Node]] = {} + for sub in self._get_subclasses(Node): + subs[sub.type] = sub + if node_type not in subs: + raise LisaException(f"unknown node type '{node_type}': {node}") else: - raise LisaException(f"unknown node type '{node_type}': {node_raw}") - self.nodes_raw = None + new_node = subs[node_type].schema().load(node) # type: ignore + nodes_final.append(new_node) + self.nodes = nodes_final + + def _get_subclasses(self, t: Type[Node]) -> Iterable[Type[Node]]: + for subclass_type in t.__subclasses__(): + yield subclass_type + yield from self._get_subclasses(subclass_type) @dataclass_json() diff 
--git a/lisa/tests/test_environment.py b/lisa/tests/test_environment.py index 6d357a8275..ec6b5d8ffe 100644 --- a/lisa/tests/test_environment.py +++ b/lisa/tests/test_environment.py @@ -1,20 +1,117 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. -from typing import Any, List +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, List, Optional, Type, cast from unittest import TestCase -from lisa import schema, search_space +from dataclasses_json import dataclass_json +from marshmallow import validate + +from lisa import node, schema, search_space from lisa.environment import load_environments from lisa.testsuite import simple_requirement from lisa.util import constants +CUSTOM_LOCAL = "custom_local" +CUSTOM_REMOTE = "custom_remote" + + +@dataclass_json() +@dataclass +class CustomLocalNodeSchema(schema.LocalNode): + type: str = field( + default=CUSTOM_LOCAL, + metadata=schema.metadata( + required=True, + validate=validate.OneOf([CUSTOM_LOCAL]), + ), + ) + + custom_local_field: Optional[str] = field(default=None) + + +class CustomLocalNode(node.LocalNode): + def __init__( + self, + index: int, + runbook: CustomLocalNodeSchema, + logger_name: str, + base_log_path: Optional[Path] = None, + name: str = "", + ) -> None: + super().__init__( + index, + runbook, + logger_name=logger_name, + base_log_path=base_log_path, + name=name, + ) + self.custom_local_field = runbook.custom_local_field + assert ( + self.custom_local_field + ), f"custom_local_field field of {CUSTOM_LOCAL}-typed nodes cannot be empty" + + @classmethod + def type_name(cls) -> str: + return CUSTOM_LOCAL + + @classmethod + def type_schema(cls) -> Type[schema.TypedSchema]: + return CustomLocalNodeSchema + + +@dataclass_json() +@dataclass +class CustomRemoteNodeSchema(schema.RemoteNode): + type: str = field( + default=CUSTOM_REMOTE, + metadata=schema.metadata( + required=True, + validate=validate.OneOf([CUSTOM_REMOTE]), + ), + ) + + custom_remote_field: Optional[str] = field(default=None) + + +class CustomRemoteNode(node.RemoteNode): + def __init__( + self, + index: int, + runbook: CustomRemoteNodeSchema, + logger_name: str, + base_log_path: Optional[Path] = None, + name: str = "", + ) -> None: + super().__init__( + index, + runbook, + logger_name=logger_name, + base_log_path=base_log_path, + name=name, + ) + self.custom_remote_field = runbook.custom_remote_field + assert ( + self.custom_remote_field + ), f"custom_remote_field field of {CUSTOM_REMOTE}-typed nodes cannot be empty" + + @classmethod + def type_name(cls) -> str: + return CUSTOM_REMOTE + + @classmethod + def type_schema(cls) -> Type[schema.TypedSchema]: + return CustomRemoteNodeSchema + def generate_runbook( is_single_env: bool = False, local: bool = False, remote: bool = False, requirement: bool = False, + local_remote_node_extensions: bool = False, ) -> schema.EnvironmentRoot: environments: List[Any] = list() nodes: List[Any] = list() @@ -47,11 +144,31 @@ def generate_runbook( "nic_count": {"min": 1, "max": 1}, } ) + if local_remote_node_extensions: + nodes.extend( + [ + { + constants.TYPE: CUSTOM_LOCAL, + constants.ENVIRONMENTS_NODES_CAPABILITY: {"core_count": {"min": 4}}, + "custom_local_field": CUSTOM_LOCAL, + }, + { + constants.TYPE: CUSTOM_REMOTE, + constants.ENVIRONMENTS_NODES_REMOTE_ADDRESS: "internal_address", + constants.ENVIRONMENTS_NODES_REMOTE_PORT: 22, + "public_address": "public_address", + "public_port": 10022, + constants.ENVIRONMENTS_NODES_REMOTE_USERNAME: "name_of_user", + 
constants.ENVIRONMENTS_NODES_REMOTE_PASSWORD: "do_not_use_it", + "custom_remote_field": CUSTOM_REMOTE, + }, + ] + ) if is_single_env: environments = [{"nodes": nodes}] else: - for node in nodes: - environments.append({"nodes": [node]}) + for n in nodes: + environments.append({"nodes": [n]}) data = {"max_concurrency": 2, constants.ENVIRONMENTS: environments} return schema.EnvironmentRoot.schema().load(data) # type: ignore @@ -70,9 +187,9 @@ def test_create_from_runbook_split(self) -> None: envs = load_environments(runbook) self.assertEqual(2, len(envs)) for env in envs.values(): - for node in env.nodes.list(): + for n in env.nodes.list(): # mock initializing - node._is_initialized = True + n._is_initialized = True self.assertEqual(1, len(env.nodes)) def test_create_from_runbook_merged(self) -> None: @@ -80,9 +197,9 @@ def test_create_from_runbook_merged(self) -> None: envs = load_environments(runbook) self.assertEqual(1, len(envs)) for env in envs.values(): - for node in env.nodes.list(): + for n in env.nodes.list(): # mock initializing - node._is_initialized = True + n._is_initialized = True self.assertEqual(2, len(env.nodes)) def test_create_from_runbook_cap(self) -> None: @@ -91,11 +208,11 @@ def test_create_from_runbook_cap(self) -> None: self.assertEqual(2, len(envs)) env = envs.get("customized_0") assert env - for node in env.nodes.list(): + for n in env.nodes.list(): # mock initializing - node._is_initialized = True - self.assertEqual(search_space.IntRange(min=4), node.capability.core_count) - self.assertEqual(search_space.IntRange(min=1), node.capability.disk_count) + n._is_initialized = True + self.assertEqual(search_space.IntRange(min=4), n.capability.core_count) + self.assertEqual(search_space.IntRange(min=1), n.capability.disk_count) # check from env capability env_cap = env.capability self.assertEqual(1, len(env_cap.nodes)) @@ -129,6 +246,25 @@ def test_create_from_requirement(self) -> None: self.assertEqual(1, len(envs), "get or create again won't create new") assert env self.assertEqual(0, len(env.nodes)) - self.assertIsNone(env.runbook.nodes) + self.assertSequenceEqual([], env.runbook.nodes) assert env.runbook.nodes_requirement self.assertEqual(2, len(env.runbook.nodes_requirement)) + + def test_create_from_custom_local_remote(self) -> None: + runbook = generate_runbook( + local_remote_node_extensions=True, is_single_env=True + ) + envs = load_environments(runbook) + self.assertEqual(1, len(envs)) + for env in envs.values(): + done: int = 0 + for n in env.nodes.list(): + if n.type_name() == CUSTOM_LOCAL: + l_n = cast(CustomLocalNode, n) + self.assertEqual(l_n.custom_local_field, CUSTOM_LOCAL) + done += 1 + elif n.type_name() == CUSTOM_REMOTE: + r_n = cast(CustomRemoteNode, n) + self.assertEqual(r_n.custom_remote_field, CUSTOM_REMOTE) + done += 1 + self.assertEqual(2, done) diff --git a/lisa/tests/test_platform.py b/lisa/tests/test_platform.py index 3b681aa804..dbc00e5953 100644 --- a/lisa/tests/test_platform.py +++ b/lisa/tests/test_platform.py @@ -150,7 +150,8 @@ def test_prepared_env_not_success_with_exception(self) -> None: self.assertEqual( "no capability found for environment: " "Environment(name='customized_0', topology='subnet', " - "nodes_raw=None, nodes_requirement=None)", + "nodes_raw=[{'type': 'local', 'capability': {'core_count': {'min': 4}}}]" + ", nodes_requirement=None)", str(cm.exception), ) From b0bc8228845e1c57e10b0e0f65bcf70339f1b750 Mon Sep 17 00:00:00 2001 From: Gustavo Lima Chaves Date: Wed, 14 Apr 2021 15:14:32 -0700 Subject: [PATCH 2/3] poetry: update 
--- poetry.lock | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index e297969bd8..37a88c777c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -304,7 +304,7 @@ dev = ["pytest", "ipython", "mypy (>=0.710)", "hypothesis", "portray", "flake8", [[package]] name = "decorator" -version = "5.0.6" +version = "5.0.7" description = "Decorators for Humans" category = "main" optional = false @@ -312,7 +312,7 @@ python-versions = ">=3.5" [[package]] name = "flake8" -version = "3.9.0" +version = "3.9.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false @@ -1267,7 +1267,6 @@ coverage = [ ] cryptography = [ {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, @@ -1284,12 +1283,12 @@ dataclasses-json = [ {file = "dataclasses_json-0.5.2-py3-none-any.whl", hash = "sha256:b746c48d9d8e884e2a0ffa59c6220a1b21f94d4f9f12c839da0a8a0efd36dc19"}, ] decorator = [ - {file = "decorator-5.0.6-py3-none-any.whl", hash = "sha256:d9f2d2863183a3c0df05f4b786f2e6b8752c093b3547a558f287bf3022fd2bf4"}, - {file = "decorator-5.0.6.tar.gz", hash = "sha256:f2e71efb39412bfd23d878e896a51b07744f2e2250b2e87d158e76828c5ae202"}, + {file = "decorator-5.0.7-py3-none-any.whl", hash = "sha256:945d84890bb20cc4a2f4a31fc4311c0c473af65ea318617f13a7257c9a58bc98"}, + {file = "decorator-5.0.7.tar.gz", hash = "sha256:6f201a6c4dac3d187352661f508b9364ec8091217442c9478f1f83c003a0f060"}, ] flake8 = [ - {file = "flake8-3.9.0-py2.py3-none-any.whl", hash = "sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff"}, - {file = "flake8-3.9.0.tar.gz", hash = "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0"}, + {file = "flake8-3.9.1-py2.py3-none-any.whl", hash = "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"}, + {file = "flake8-3.9.1.tar.gz", hash = "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378"}, ] flake8-black = [ {file = "flake8-black-0.2.1.tar.gz", hash = "sha256:f26651bc10db786c03f4093414f7c9ea982ed8a244cec323c984feeffdf4c118"}, From d61780e1ce95514233fdc8890848ecb57bc7bf08 Mon Sep 17 00:00:00 2001 From: Gustavo Lima Chaves Date: Mon, 19 Apr 2021 16:45:10 -0700 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=94=8A=20[wait=5Ftcp=5Fport=5Fready]?= =?UTF-8?q?=20make=20logs=20more=20meaningful,=20by=20showing=20name=20of?= =?UTF-8?q?=20host?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The user deserves to know which host was unreachable, at least, not just the port: ```2021-04-19 23:54:54.453 INFO LISA.lisa 'customized_0' attached to test case 'ATest.a_test': [Errno -2] Name or service not known``` was not telling anything --- lisa/util/shell.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git 
a/lisa/util/shell.py b/lisa/util/shell.py index 3994193dc8..b39f08ee4d 100644 --- a/lisa/util/shell.py +++ b/lisa/util/shell.py @@ -38,18 +38,22 @@ def wait_tcp_port_ready( timout_timer = create_timer() while timout_timer.elapsed(False) < timeout: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as tcp_socket: - result = tcp_socket.connect_ex((address, port)) - if result == 0: - is_ready = True - break - else: - if times % 10 == 0 and log: - log.debug( - f"cannot connect to TCP port: {port}, error code: {result}, " - f"tried times: {times}, elapsed: {timout_timer}. retrying..." - ) - sleep(1) - times += 1 + try: + result = tcp_socket.connect_ex((address, port)) + if result == 0: + is_ready = True + break + else: + if times % 10 == 0 and log: + log.debug( + f"cannot connect to {address}:{port}, " + f"error code: {result}, tried times: {times}," + f" elapsed: {timout_timer}. retrying..." + ) + sleep(1) + times += 1 + except Exception as e: + raise LisaException(f"failed to connect to {address}:{port}: {e}") return is_ready, result
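As an illustration of the improved failure reporting, here is a minimal caller sketch. It is not part of this series; the wait_tcp_port_ready(address, port, ...) parameter order, the import paths, and the host name are assumptions inferred from the hunk above rather than verified against the rest of lisa/util/shell.py.

```python
# Hypothetical caller sketch (not from this patch series). Assumes
# wait_tcp_port_ready(address, port, ...) returns (is_ready, last connect_ex
# code) and raises LisaException on socket/resolution errors, per the hunk above.
from lisa.util import LisaException
from lisa.util.shell import wait_tcp_port_ready

try:
    is_ready, code = wait_tcp_port_ready("my-node.example.com", 22)
    if not is_ready:
        # the periodic debug log now names the host, not just the port
        print(f"port never opened, last connect_ex code: {code}")
except LisaException as e:
    # resolution errors now read like:
    # "failed to connect to my-node.example.com:22: [Errno -2] Name or service not known"
    print(e)
```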