diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index dd7dd2c1..2b079e60 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -109,9 +109,8 @@ jobs:
 
   # Runs the sdk features repo tests with this repo's current SDK code
   features-tests:
-    uses: temporalio/features/.github/workflows/python.yaml@uv
+    uses: temporalio/features/.github/workflows/python.yaml@main
     with:
       python-repo-path: ${{github.event.pull_request.head.repo.full_name}}
       version: ${{github.event.pull_request.head.ref}}
       version-is-repo-ref: true
-      features-repo-ref: uv
diff --git a/pyproject.toml b/pyproject.toml
index d29dc2df..2ca4d191 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,24 +54,29 @@ dev = [
 
 [tool.poe.tasks]
 build-develop = "uv run maturin develop --uv"
 build-develop-with-release = { cmd = "uv run maturin develop --release --uv" }
-format = [{cmd = "uv run ruff check --select I --fix"}, {cmd = "uv run ruff format"}, ]
+format = [{cmd = "uv run ruff check --fix"}, {cmd = "uv run ruff format"}, ]
 gen-docs = "uv run python scripts/gen_docs.py"
 gen-protos = "uv run python scripts/gen_protos.py"
 lint = [
-  {cmd = "uv run ruff check --select I"},
+  {cmd = "uv run ruff check"},
   {cmd = "uv run ruff format --check"},
-  {ref = "lint-types"},
-  {cmd = "uv run pyright"},
+  {ref = "lint-types-mypy"},
+  {ref = "lint-types-pyright"},
   {ref = "lint-docs"},
 ]
 bridge-lint = { cmd = "cargo clippy -- -D warnings", cwd = "temporalio/bridge" }
 # TODO(cretz): Why does pydocstyle complain about @overload missing docs after
 # https://github.com/PyCQA/pydocstyle/pull/511?
 lint-docs = "uv run pydocstyle --ignore-decorators=overload"
-lint-types = "uv run mypy --namespace-packages --check-untyped-defs ."
+lint-types-mypy = "uv run mypy --namespace-packages --check-untyped-defs ."
+lint-types-pyright = "uv run pyright"
 run-bench = "uv run python scripts/run_bench.py"
 test = "uv run pytest"
 
+[tool.ruff]
+target-version = "py39"
+exclude = ["*_pb2.py", "*_pb2_grpc.py"]
+lint.ignore = ["E741"]  # we occasionally use e.g. O as a type var and l as a loop variable
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
@@ -183,9 +188,6 @@ exclude = [
     "temporalio/bridge/testing.py",
 ]
 
-[tool.ruff]
-target-version = "py39"
-
 [build-system]
 requires = ["maturin>=1.0,<2.0"]
 build-backend = "maturin"
diff --git a/scripts/gen_protos.py b/scripts/gen_protos.py
index 61d2709f..6928dead 100644
--- a/scripts/gen_protos.py
+++ b/scripts/gen_protos.py
@@ -6,7 +6,7 @@
 import tempfile
 from functools import partial
 from pathlib import Path
-from typing import List, Mapping, Optional
+from typing import List, Mapping
 
 base_dir = Path(__file__).parent.parent
 proto_dir = (
@@ -25,8 +25,8 @@
     v
     for v in proto_dir.glob("**/*.proto")
     if not str(v).startswith(str(testsrv_proto_dir / "dependencies"))
-    and not "health" in str(v)
-    and not "google" in str(v)
+    and "health" not in str(v)
+    and "google" not in str(v)
 ]
 proto_paths.extend(test_proto_dir.glob("**/*.proto"))
 proto_paths.extend(additional_proto_dir.glob("**/*.proto"))
@@ -95,7 +95,7 @@ def fix_generated_output(base_path: Path):
         message_names = sorted(message_names)
         if message_names:
             f.write(
-                f'\n__all__ = [\n "' + '",\n "'.join(message_names) + '",\n]\n'
+                '\n__all__ = [\n "' + '",\n "'.join(message_names) + '",\n]\n'
             )
         # gRPC imports
         if "service_pb2_grpc" in imports:
@@ -115,7 +115,7 @@ def fix_generated_output(base_path: Path):
                 message_names.append(message)
             # __all__
             message_names = sorted(message_names)
-            f.write(f' __all__.extend(["' + '", "'.join(message_names) + '"])\n')
+            f.write(' __all__.extend(["' + '", "'.join(message_names) + '"])\n')
             f.write("except ImportError:\n pass")
diff --git a/temporalio/activity.py b/temporalio/activity.py
index 281cfcb8..84fe9b04 100644
--- a/temporalio/activity.py
+++ b/temporalio/activity.py
@@ -244,7 +244,7 @@ def in_activity() -> bool:
     Returns:
         True if in an activity, False otherwise.
     """
-    return not _current_context.get(None) is None
+    return _current_context.get(None) is not None
 
 
 def info() -> Info:
diff --git a/temporalio/api/cloud/cloudservice/v1/__init__.py b/temporalio/api/cloud/cloudservice/v1/__init__.py
index 4ae8e6aa..92369406 100644
--- a/temporalio/api/cloud/cloudservice/v1/__init__.py
+++ b/temporalio/api/cloud/cloudservice/v1/__init__.py
@@ -194,9 +194,9 @@
 
 # gRPC is optional
 try:
-    import grpc
+    import grpc  # noqa: F401
 
-    from .service_pb2_grpc import (
+    from .service_pb2_grpc import (  # noqa: F401
         CloudServiceServicer,
         CloudServiceStub,
         add_CloudServiceServicer_to_server,
diff --git a/temporalio/api/operatorservice/v1/__init__.py b/temporalio/api/operatorservice/v1/__init__.py
index f6cd76fb..1a7e4f66 100644
--- a/temporalio/api/operatorservice/v1/__init__.py
+++ b/temporalio/api/operatorservice/v1/__init__.py
@@ -56,9 +56,9 @@
 
 # gRPC is optional
 try:
-    import grpc
+    import grpc  # noqa: F401
 
-    from .service_pb2_grpc import (
+    from .service_pb2_grpc import (  # noqa: F401
         OperatorServiceServicer,
         OperatorServiceStub,
         add_OperatorServiceServicer_to_server,
diff --git a/temporalio/api/testservice/v1/__init__.py b/temporalio/api/testservice/v1/__init__.py
index 8539030d..339fc773 100644
--- a/temporalio/api/testservice/v1/__init__.py
+++ b/temporalio/api/testservice/v1/__init__.py
@@ -22,9 +22,9 @@
 
 # gRPC is optional
 try:
-    import grpc
+    import grpc  # noqa: F401
 
-    from .service_pb2_grpc import (
+    from .service_pb2_grpc import (  # noqa: F401
         TestServiceServicer,
         TestServiceStub,
         add_TestServiceServicer_to_server,
diff --git a/temporalio/api/workflowservice/v1/__init__.py b/temporalio/api/workflowservice/v1/__init__.py
index b5e22d32..c416d3c6 100644
--- a/temporalio/api/workflowservice/v1/__init__.py
+++ b/temporalio/api/workflowservice/v1/__init__.py
@@ -278,9 +278,9 @@
 
 # gRPC is optional
 try:
-    import grpc
+    import grpc  # noqa: F401
 
-    from .service_pb2_grpc import (
+    from .service_pb2_grpc import (  # noqa: F401
         WorkflowServiceServicer,
         WorkflowServiceStub,
         add_WorkflowServiceServicer_to_server,
diff --git a/temporalio/bridge/client.py b/temporalio/bridge/client.py
index ddcee444..24607b25 100644
--- a/temporalio/bridge/client.py
+++ b/temporalio/bridge/client.py
@@ -13,7 +13,7 @@
 import temporalio.bridge.runtime
 import temporalio.bridge.temporal_sdk_bridge
-from temporalio.bridge.temporal_sdk_bridge import RPCError
+from temporalio.bridge.temporal_sdk_bridge import RPCError  # noqa: F401
 
 
 @dataclass
diff --git a/temporalio/bridge/worker.py b/temporalio/bridge/worker.py
index b04af11f..e4bab1f9 100644
--- a/temporalio/bridge/worker.py
+++ b/temporalio/bridge/worker.py
@@ -35,7 +35,7 @@ from temporalio.bridge.temporal_sdk_bridge import (
     CustomSlotSupplier as BridgeCustomSlotSupplier,
 )
-from temporalio.bridge.temporal_sdk_bridge import PollShutdownError
+from temporalio.bridge.temporal_sdk_bridge import PollShutdownError  # noqa: F401
 
 
 @dataclass
diff --git a/temporalio/common.py b/temporalio/common.py
index e073aaec..10c17a29 100644
--- a/temporalio/common.py
+++ b/temporalio/common.py
@@ -30,7 +30,7 @@
 )
 
 import google.protobuf.internal.containers
-from typing_extensions import ClassVar, NamedTuple, Self, TypeAlias, get_origin
+from typing_extensions import NamedTuple, Self, TypeAlias, get_origin
 
 import temporalio.api.common.v1
 import temporalio.api.enums.v1
diff --git a/temporalio/converter.py b/temporalio/converter.py
index 37e7641d..3913de85 100644
--- a/temporalio/converter.py
+++ b/temporalio/converter.py
@@ -926,7 +926,7 @@ def from_failure(
                 stack_trace = encoded_attributes.get("stack_trace")
                 if isinstance(stack_trace, str):
                     failure.stack_trace = stack_trace
-            except:
+            except BaseException:
                 pass
 
         err: temporalio.exceptions.FailureError
diff --git a/temporalio/runtime.py b/temporalio/runtime.py
index fe6a26ca..a2f90b9e 100644
--- a/temporalio/runtime.py
+++ b/temporalio/runtime.py
@@ -207,7 +207,7 @@ def _on_logs(
                 # just in case)
                 try:
                     message += f" {log.fields}"
-                except:
+                except BaseException:
                     pass
                 record = self.logger.makeRecord(
                     name,
diff --git a/temporalio/testing/_workflow.py b/temporalio/testing/_workflow.py
index d6992a06..2d86838f 100644
--- a/temporalio/testing/_workflow.py
+++ b/temporalio/testing/_workflow.py
@@ -215,10 +215,10 @@ async def start_local(
                 ),
                 server,
             )
-        except:
+        except BaseException:
             try:
                 await server.shutdown()
-            except:
+            except BaseException:
                 logger.warn(
                     "Failed stopping local server on client connection failure",
                     exc_info=True,
@@ -326,10 +326,10 @@ async def start_time_skipping(
                 ),
                 server,
             )
-        except:
+        except BaseException:
             try:
                 await server.shutdown()
-            except:
+            except BaseException:
                 logger.warn(
                     "Failed stopping test server on client connection failure",
                     exc_info=True,
@@ -474,13 +474,13 @@ async def time_skipping_unlocked(self) -> AsyncIterator[None]:
             await self.client.test_service.lock_time_skipping(
                 temporalio.api.testservice.v1.LockTimeSkippingRequest()
             )
-        except:
+        except Exception:
             # Lock it back, swallowing error
             try:
                 await self.client.test_service.lock_time_skipping(
                     temporalio.api.testservice.v1.LockTimeSkippingRequest()
                 )
-            except:
+            except Exception:
                 logger.exception("Failed locking time skipping after error")
             raise
diff --git a/temporalio/worker/_activity.py b/temporalio/worker/_activity.py
index 9c4889c9..e02a8c37 100644
--- a/temporalio/worker/_activity.py
+++ b/temporalio/worker/_activity.py
@@ -518,7 +518,7 @@ async def _run_activity(
         if running_activity.last_heartbeat_task:
             try:
                 await running_activity.last_heartbeat_task
-            except:
+            except BaseException:
                 # Should never happen because it's trapped in-task
                 temporalio.activity.logger.exception(
                     "Final heartbeat task didn't trap error"
                 )
@@ -750,7 +750,7 @@ def _execute_sync_activity(
     if isinstance(heartbeat, SharedHeartbeatSender):
         # To make mypy happy
         heartbeat_sender = heartbeat
-        heartbeat_fn = lambda *details: heartbeat_sender.send_heartbeat(
+        heartbeat_fn = lambda *details: heartbeat_sender.send_heartbeat(  # noqa: E731
             info.task_token, *details
         )
     else:
diff --git a/temporalio/worker/_replayer.py b/temporalio/worker/_replayer.py
index 05f65cbd..f3f7d6f2 100644
--- a/temporalio/worker/_replayer.py
+++ b/temporalio/worker/_replayer.py
@@ -156,7 +156,7 @@ async def workflow_replay_iterator(
             replayed.
         """
         try:
-            last_replay_failure: Optional[Exception]
+            last_replay_failure: Optional[Exception] = None
             last_replay_complete = asyncio.Event()
 
             # Create eviction hook
diff --git a/temporalio/worker/_worker.py b/temporalio/worker/_worker.py
index 4c34a950..62ab5682 100644
--- a/temporalio/worker/_worker.py
+++ b/temporalio/worker/_worker.py
@@ -29,7 +29,7 @@
 from ._activity import SharedStateManager, _ActivityWorker
 from ._interceptor import Interceptor
-from ._tuning import WorkerTuner, _to_bridge_slot_supplier
+from ._tuning import WorkerTuner
 from ._workflow import _WorkflowWorker
 from ._workflow_instance import UnsandboxedWorkflowRunner, WorkflowRunner
 from .workflow_sandbox import SandboxedWorkflowRunner
@@ -504,7 +504,7 @@ async def raise_on_shutdown():
             if self._config["on_fatal_error"]:
                 try:
                     await self._config["on_fatal_error"](exception)
-                except:
+                except BaseException:
                     logger.warning("Fatal error handler failed")
 
         except asyncio.CancelledError as user_cancel_err:
@@ -560,7 +560,7 @@ async def raise_on_shutdown():
         # Do final shutdown
         try:
             await self._bridge_worker.finalize_shutdown()
-        except:
+        except BaseException:
             # Ignore errors here that can arise in some tests where the bridge
             # worker still has a reference
             pass
diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py
index fdc12680..c1217469 100644
--- a/temporalio/worker/workflow_sandbox/_restrictions.py
+++ b/temporalio/worker/workflow_sandbox/_restrictions.py
@@ -933,7 +933,8 @@ def __init__(
         def bind_f(instance: _RestrictedProxy, obj: Any) -> Callable:
             def i_op(self: Any, other: Any) -> _RestrictedProxy:
-                f(self, other)  # type: ignore
+                # TODO: wat
+                f(self, other)  # type: ignore # noqa: F821
                 return instance
 
             return i_op.__get__(obj, type(obj))  # type: ignore
diff --git a/temporalio/workflow.py b/temporalio/workflow.py
index 0a298a03..8105e373 100644
--- a/temporalio/workflow.py
+++ b/temporalio/workflow.py
@@ -1660,7 +1660,7 @@ def _assert_dynamic_handler_args(
     if (
         not arg_types
         or len(arg_types) != 2
-        or arg_types[0] != str
+        or arg_types[0] is not str
         or arg_types[1] != Sequence[temporalio.common.RawValue]
     ):
         raise RuntimeError(
@@ -4273,7 +4273,7 @@ class ContinueAsNewError(BaseException):
     def __init__(self, *args: object) -> None:
         """Direct instantiation is disabled. Use :py:func:`continue_as_new`."""
-        if type(self) == ContinueAsNewError:
+        if type(self) is ContinueAsNewError:
             raise RuntimeError("Cannot instantiate ContinueAsNewError directly")
         super().__init__(*args)
diff --git a/tests/conftest.py b/tests/conftest.py
index 1c8bbf9d..041855bd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,6 +1,4 @@
 import asyncio
-import logging
-import multiprocessing
 import os
 import sys
 from typing import AsyncGenerator
@@ -25,6 +23,10 @@
 # Unless specifically overridden, we expect tests to run under protobuf 4.x/5.x lib
 import google.protobuf
 
+from temporalio.client import Client
+from temporalio.testing import WorkflowEnvironment
+from tests.helpers.worker import ExternalPythonWorker, ExternalWorker
+
 protobuf_version = google.protobuf.__version__
 if os.getenv("TEMPORAL_TEST_PROTO3"):
     assert protobuf_version.startswith(
@@ -35,10 +37,6 @@
         "5."
     ), f"Expected protobuf 4.x/5.x, got {protobuf_version}"
 
-from temporalio.client import Client
-from temporalio.testing import WorkflowEnvironment
-from tests.helpers.worker import ExternalPythonWorker, ExternalWorker
-
 
 def pytest_addoption(parser):
     parser.addoption(
diff --git a/tests/test_client.py b/tests/test_client.py
index 43ec631a..49a32ba1 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -381,7 +381,7 @@ async def test_query(client: Client, worker: ExternalWorker):
     await handle.result()
    assert "some query arg" == await handle.query("some query", "some query arg")
     # Try a query not on the workflow
-    with pytest.raises(WorkflowQueryFailedError) as err:
+    with pytest.raises(WorkflowQueryFailedError):
         await handle.query("does not exist")
diff --git a/tests/test_converter.py b/tests/test_converter.py
index 4cad639a..23f2838d 100644
--- a/tests/test_converter.py
+++ b/tests/test_converter.py
@@ -36,7 +36,6 @@
 import temporalio.api.common.v1
 import temporalio.common
 from temporalio.api.common.v1 import Payload, Payloads
-from temporalio.api.common.v1 import Payload as AnotherNameForPayload
 from temporalio.api.failure.v1 import Failure
 from temporalio.common import RawValue
 from temporalio.converter import (
diff --git a/tests/testing/test_activity.py b/tests/testing/test_activity.py
index 29b66c77..f00e492d 100644
--- a/tests/testing/test_activity.py
+++ b/tests/testing/test_activity.py
@@ -68,7 +68,7 @@ def via_thread():
                 while not activity.is_cancelled():
                     time.sleep(0.2)
                 time.sleep(0.2)
-            except:
+            except BaseException:
                 raise RuntimeError("Unexpected")
         except CancelledError:
             nonlocal properly_cancelled
@@ -108,5 +108,5 @@ async def assert_equals(a: str, b: str) -> None:
     except Exception as err:
         actual_err = err
 
-    assert type(expected_err) == type(actual_err)
+    assert type(expected_err) is type(actual_err)
     assert str(expected_err) == str(actual_err)
diff --git a/tests/worker/test_activity.py b/tests/worker/test_activity.py
index b17a0650..b6037432 100644
--- a/tests/worker/test_activity.py
+++ b/tests/worker/test_activity.py
@@ -589,7 +589,7 @@ class SomeClass2:
 
 
 async def test_activity_type_hints(client: Client, worker: ExternalWorker):
-    activity_param1: SomeClass2
+    activity_param1: Optional[SomeClass2] = None
 
     @activity.defn
     async def some_activity(param1: SomeClass2, param2: str) -> str:
diff --git a/tests/worker/test_workflow.py b/tests/worker/test_workflow.py
index 665a5393..af32821d 100644
--- a/tests/worker/test_workflow.py
+++ b/tests/worker/test_workflow.py
@@ -52,8 +52,6 @@
 from temporalio.bridge.proto.workflow_completion import WorkflowActivationCompletion
 from temporalio.client import (
     Client,
-    RPCError,
-    RPCStatusCode,
     WorkflowExecutionStatus,
     WorkflowFailureError,
     WorkflowHandle,
@@ -222,7 +220,7 @@ async def test_workflow_info(client: Client, env: WorkflowEnvironment):
     assert info["retry_policy"] == json.loads(
         json.dumps(dataclasses.asdict(retry_policy), default=str)
     )
-    assert uuid.UUID(info["run_id"]).version == 4
+    assert uuid.UUID(info["run_id"]).version == 7
     assert info["run_timeout"] is None
     datetime.fromisoformat(info["start_time"])
     assert info["task_queue"] == worker.task_queue
@@ -2184,7 +2182,7 @@ async def status() -> str:
     # Send stack trace query
     trace = await handle.query("__enhanced_stack_trace")
 
-    assert type(trace) == EnhancedStackTrace
+    assert type(trace) is EnhancedStackTrace
 
     assert "never_completing_coroutine" in [
         loc.function_name for stack in trace.stacks for loc in stack.locations
@@ -2225,7 +2223,7 @@ async def status() -> str:
     # test that a coroutine only has the source as its stack
 
-    assert type(trace) == EnhancedStackTrace
+    assert type(trace) is EnhancedStackTrace
 
     assert "never_completing_coroutine" in [
         loc.function_name for stack in trace.stacks for loc in stack.locations
diff --git a/tests/worker/workflow_sandbox/test_importer.py b/tests/worker/workflow_sandbox/test_importer.py
index ee29d642..2d5627f4 100644
--- a/tests/worker/workflow_sandbox/test_importer.py
+++ b/tests/worker/workflow_sandbox/test_importer.py
@@ -19,7 +19,7 @@ def test_workflow_sandbox_importer_invalid_module():
     with pytest.raises(RestrictedWorkflowAccessError) as err:
         with Importer(restrictions, RestrictionContext()).applied():
-            import tests.worker.workflow_sandbox.testmodules.invalid_module
+            import tests.worker.workflow_sandbox.testmodules.invalid_module  # noqa: F401
     assert (
         err.value.qualified_name
         == "tests.worker.workflow_sandbox.testmodules.invalid_module"
     )
diff --git a/tests/worker/workflow_sandbox/test_runner.py b/tests/worker/workflow_sandbox/test_runner.py
index 14b2c94c..cf831149 100644
--- a/tests/worker/workflow_sandbox/test_runner.py
+++ b/tests/worker/workflow_sandbox/test_runner.py
@@ -7,6 +7,9 @@
 import os
 import time
 import uuid
+
+# This used to fail because our __init__ couldn't handle metaclass init
+import zipfile
 from dataclasses import dataclass
 from datetime import date, datetime, timedelta
 from enum import IntEnum
@@ -33,9 +36,6 @@
 # runtime only
 _ = os.name
 
-# This used to fail because our __init__ couldn't handle metaclass init
-import zipfile
-
 
 class MyZipFile(zipfile.ZipFile):
     pass