diff --git a/.githooks/pre-commit b/.githooks/pre-commit
deleted file mode 100755
index 20b10ff25..000000000
--- a/.githooks/pre-commit
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-import sys
-from pylama.hook import git_hook
-
-if __name__ == '__main__':
-    sys.exit(git_hook())
diff --git a/.github/workflows/pylama.yml b/.github/workflows/pylint.yml
similarity index 73%
rename from .github/workflows/pylama.yml
rename to .github/workflows/pylint.yml
index 3ddd29218..3e2c871c4 100644
--- a/.github/workflows/pylama.yml
+++ b/.github/workflows/pylint.yml
@@ -1,4 +1,4 @@
-name: PyLama Lint
+name: Pylint Lint

 on:
   # For manual exec
@@ -11,13 +11,13 @@ on:
     branches: [main, develop]

 jobs:
-  pylama:
+  pylint:
     if: ${{ !(contains(github.event.pull_request.labels.*.name, 'WIP (no-ci)')) }}
-    name: PyLama Lint
+    name: Pylint Lint
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - name: Lint
         run: |
-          pip install pylama==8.4.1 pyflakes==3.0.1 pylint==2.15.9 pydocstyle==6.1.1 2>&1 >/dev/null
-          pylama beeflow/
+          pip install pylint==3.2.7 2>&1 >/dev/null
+          pylint --rcfile=setup.cfg beeflow/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..d12dd5625
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,14 @@
+repos:
+- repo: local
+  hooks:
+  - id: pylint
+    name: pylint
+    entry: pylint
+    language: system
+    types: [python]
+    args:
+      [
+        "-rn", # Only display messages
+        "-sn", # Don't display the score
+        "--rcfile=setup.cfg", # Link to the config file
+      ]
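A quick note on what the new hook does in practice: for staged Python files, pre-commit runs pylint with the arguments listed above. Below is a minimal sketch of the equivalent invocation, assuming pylint is on the PATH; the file list is a hypothetical placeholder, since pre-commit supplies the real staged paths.

    # Hypothetical helper mirroring what the pre-commit hook runs; not part of this change set.
    import subprocess

    staged_files = ['beeflow/client/bee_client.py']  # placeholder; pre-commit passes the real staged paths
    result = subprocess.run(
        ['pylint', '-rn', '-sn', '--rcfile=setup.cfg', *staged_files],
        check=False,  # a nonzero exit code means lint findings; pre-commit reports it as a hook failure
    )
    print('pylint exit code:', result.returncode)
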
diff --git a/beeflow/client/bee_client.py b/beeflow/client/bee_client.py
index 2a6a798a2..5de384df0 100644
--- a/beeflow/client/bee_client.py
+++ b/beeflow/client/bee_client.py
@@ -4,6 +4,11 @@
 This script provides a client interface to the user to manage workflows.
 Capablities include submitting, starting, listing, pausing and cancelling workflows.
 """
+
+# Disable W0511: This allows us to have TODOs in the code
+# Disable R1732: Significant code restructuring required to fix
+# pylint:disable=W0511,R1732
+
 import os
 import sys
 import logging
@@ -34,7 +39,7 @@ from beeflow.common.db import bdb


 # Length of a shortened workflow ID
-short_id_len = 6 #noqa: Not a constant
+short_id_len = 6  # pylint: disable=C0103  # not a constant

 # Maximum length of a workflow ID
 MAX_ID_LEN = 32
@@ -167,7 +172,7 @@ def error_exit(msg, include_caller=True):
     raise ClientError(msg) from None


-def error_handler(resp):  # noqa (this is an error handler, it doesn't need to return an expression)
+def error_handler(resp):  # pylint: disable=R1710  # error handler doesn't need to return an expression
     """Handle a 500 error in a response."""
     if resp.status_code != 500:
         return resp
@@ -231,7 +236,7 @@ def get_wf_list():

 def check_short_id_collision():
     """Check short workflow IDs for colliions; increase short ID length if detected."""
-    global short_id_len #noqa: Not a constant
+    global short_id_len
     workflow_list = get_wf_list()
     if workflow_list:
         while short_id_len < MAX_ID_LEN:
@@ -470,13 +475,13 @@ def package(wf_path: pathlib.Path = typer.Argument(...,

     # Just use tar with subprocess. Python's tar library is not performant.
     return_code = subprocess.run(['tar', '-C', parent_dir, '-czf', tarball, wf_dir],
                                  check=True).returncode
-    package_path = package_dest.resolve()/tarball  # noqa: Not an arithmetic operation
+    package_path = package_dest.resolve()/tarball
     # Get the curent working directory
     cwd = pathlib.Path().absolute()
     if package_dest != cwd:
         # Move the tarball if the directory it's wanted in is not in the current working directory
-        tarball_path = cwd/tarball  # noqa: Not an arithmetic operation
+        tarball_path = cwd/tarball
         shutil.move(tarball_path, package_path)

     if return_code != 0:
@@ -700,7 +705,7 @@ def reexecute(wf_name: str = typer.Argument(..., help='The workflow name'),
     except requests.exceptions.ConnectionError:
         error_exit('Could not reach WF Manager.')

-    if resp.status_code != requests.codes.created: #noqa: member does exist
+    if resp.status_code != requests.codes.created:  # pylint: disable=E1101
         error_exit(f"Reexecute for {wf_name} failed. Please check the WF Manager.")

     wf_id = resp.json()['wf_id']
@@ -765,7 +770,3 @@ def main():

 if __name__ == "__main__":
     app()
-
-# Ignore W0511: This allows us to have TODOs in the code
-# Ignore R1732: Significant code restructuring required to fix
-# pylama:ignore=W0511,R1732
diff --git a/beeflow/client/core.py b/beeflow/client/core.py
index f28e2dd42..b18e49c47 100644
--- a/beeflow/client/core.py
+++ b/beeflow/client/core.py
@@ -111,7 +111,7 @@ def poll(self):
         """Poll each process to check for errors, restart failed processes."""
         # Max number of times a component can be restarted
         max_restarts = bc.get('DEFAULT', 'max_restarts')
-        for name in self.procs:  # noqa no need to iterate with items() since self.procs may be set
+        for name in self.procs:  # pylint: disable=C0206  # no need to iterate with items() since self.procs may be set
             component = self.components[name]
             if component['failed']:
                 continue
@@ -255,7 +255,7 @@ def start_slurm_restd():
     # slurm_args = f'-s openapi/{openapi_version},openapi/db{openapi_version}'
     slurm_socket = paths.slurm_socket()
     subprocess.run(['rm', '-f', slurm_socket], check=True)
-    fp = open(slurmrestd_log, 'w', encoding='utf-8')  # noqa
+    fp = open(slurmrestd_log, 'w', encoding='utf-8')  # pylint: disable=R1732
     cmd = ['slurmrestd']
     cmd.extend(slurm_args.split())
     cmd.append(f'unix:{slurm_socket}')
@@ -277,7 +277,7 @@ def load_check_charliecloud():
     if not shutil.which('ch-run'):
         lmod = os.environ.get('MODULESHOME')
         sys.path.insert(0, lmod + '/init')
-        from env_modules_python import module #noqa No need to import at top
+        from env_modules_python import module  # pylint: disable=C0415  # No need to import at top
         module("load", "charliecloud")
         # Try loading the Charliecloud module then test again
         if not shutil.which('ch-run'):
@@ -328,7 +328,7 @@ def check_dependencies(backend=False):
     # Check for the flux API
     if bc.get('DEFAULT', 'workload_scheduler') == 'Flux':
         try:
-            import flux  # noqa needed to check whether flux api is actually installed
+            import flux  # pylint: disable=W0611,C0415  # needed to check whether flux api is actually installed
         except ModuleNotFoundError:
             warn('Failed to import flux Python API. Please make sure you can '
                  'use flux in your environment.')
@@ -382,7 +382,7 @@ def handle_client(self, server):

 def daemonize(mgr, base_components):
     """Start beeflow as a daemon, monitoring all processes."""
-    def handle_terminate(signum, stack):  # noqa
+    def handle_terminate(signum, stack):  # pylint: disable=W0613
         """Handle a terminate signal."""
         # Kill all subprocesses
         mgr.kill()
diff --git a/beeflow/cloud_launcher.py b/beeflow/cloud_launcher.py
index b64430d49..ed7a4d98f 100644
--- a/beeflow/cloud_launcher.py
+++ b/beeflow/cloud_launcher.py
@@ -1,4 +1,9 @@
 """BEE Cloud Installer Script."""
+
+# Disable R1732: Significant code restructuring required to fix
+# Disable W0511: This allows us to have TODOs in the code
+# pylint:disable=W0511,R1732
+
 import argparse
 import subprocess
 import sys
@@ -205,6 +210,3 @@ def main():

 if __name__ == '__main__':
     main()
-# Ignore R1732: Significant code restructuring required to fix
-# Ignore W0511: This allows us to have TODOs in the code
-# pylama:ignore=W0511,R1732
diff --git a/beeflow/common/api.py b/beeflow/common/api.py
index 8d5f73310..a506972b1 100644
--- a/beeflow/common/api.py
+++ b/beeflow/common/api.py
@@ -7,6 +7,6 @@ class BeeApi(Api):
     """Wrapper around Flask-Restful's API to catch exceptions."""

-    def handle_error(self, e):  # noqa (conflict on naming in base class vs. following convention)
+    def handle_error(self, e):  # pylint: disable=W0613  # conflict on naming in base class vs. following convention
         """Handle an error or exception."""
         return make_response(jsonify(error=traceback.format_exc()), 500)
diff --git a/beeflow/common/build/container_drivers.py b/beeflow/common/build/container_drivers.py
index 555356dac..32093284f 100644
--- a/beeflow/common/build/container_drivers.py
+++ b/beeflow/common/build/container_drivers.py
@@ -3,6 +3,10 @@
 All container-based build systems belong here.
 """

+# Disable W0231: linter doesn't know about abstract classes,
+# it's ok to not call the parent __init__
+# pylint:disable=W0231
+
 import os
 import shutil
 import subprocess
@@ -65,14 +69,14 @@ def __init__(self, task):
         try:
             requirement_docker_requirements = self.task.requirements['DockerRequirement'].keys()
             docker_requirements = docker_requirements.union(requirement_docker_requirements)
-            req_string = (f'{set(requirement_docker_requirements)}')
+            req_string = f'{set(requirement_docker_requirements)}'
             log.info(f'task {self.task.id} requirement DockerRequirements: {req_string}')
         except (TypeError, KeyError):
             log.info(f'task {self.task.name} {self.task.id} no DockerRequirements in requirement')
         try:
             hint_docker_requirements = self.task.hints['DockerRequirement'].keys()
             docker_requirements = docker_requirements.union(hint_docker_requirements)
-            hint_str = (f'{set(hint_docker_requirements)}')
+            hint_str = f'{set(hint_docker_requirements)}'
             log.info(f'task {self.task.name} {self.task.id} hint DockerRequirements: {hint_str}')
         except (TypeError, KeyError):
             log.info(f'task {self.task.name} {self.task.id} hints has no DockerRequirements')
@@ -252,7 +256,7 @@ def process_docker_import(self, param_import=None):

         # Pull the image.
         file_name = crt_driver.get_ccname(import_input_path)
-        cmd = (f'ch-convert {import_input_path} {self.deployed_image_root}/{file_name}')
+        cmd = f'ch-convert {import_input_path} {self.deployed_image_root}/{file_name}'
         log.info(f'Docker import: Assuming container name is {import_input_path}. Correct?')
         return subprocess.run(cmd, check=True, shell=True)
@@ -423,5 +427,3 @@ def process_docker_output_directory(self, param_output_directory=None):

         param_output_directory may be used to override DockerRequirement specs.
         """
-# Ignore W0231: linter doesn't know about abstract classes, it's ok to now call the parent __init__
-# pylama:ignore=W0231
diff --git a/beeflow/common/build_interfaces.py b/beeflow/common/build_interfaces.py
index 1aa1944a7..00a20fa89 100644
--- a/beeflow/common/build_interfaces.py
+++ b/beeflow/common/build_interfaces.py
@@ -6,6 +6,9 @@
 components of the gdb_interface as required.
 """

+# Disable W0703: Catching generic exception isn't a problem if we just want a descriptive report
+# pylint:disable=W0703
+
 # from beeflow.common.gdb.gdb_interface import GraphDatabaseInterface
 # from beeflow.common.build.container_drivers import CharliecloudBuildDriver,
 # SingularityBuildDriver
@@ -82,7 +85,3 @@ def build_main(task):
             log.info(f'{err}')

     build_main(local_task)
-
-# Ignore W0703: Catching generic exception isn't a problem if we just want a descriptive report
-# Ignore C901: "'build_main' is too complex" - this function is just around 40 lines
-# pylama:ignore=W0703,C901
diff --git a/beeflow/common/cli.py b/beeflow/common/cli.py
index cbd223d55..615a5ee43 100644
--- a/beeflow/common/cli.py
+++ b/beeflow/common/cli.py
@@ -5,6 +5,6 @@ class NaturalOrderGroup(click.Group):
     """Natural ordering class for using with CLI code."""

-    def list_commands(self, ctx):  # noqa
+    def list_commands(self, ctx):
         """List the commands in order."""
         return self.commands.keys()
diff --git a/beeflow/common/cloud/__init__.py b/beeflow/common/cloud/__init__.py
index 158c9481f..5bca1a24a 100644
--- a/beeflow/common/cloud/__init__.py
+++ b/beeflow/common/cloud/__init__.py
@@ -1,4 +1,8 @@
 """Cloud init module."""
+
+# Disable W0611: These are meant to be used by external code
+# pylint:disable=W0611
+
 from beeflow.common.cloud import chameleoncloud
 from beeflow.common.cloud import openstack
 from beeflow.common.cloud import provider
@@ -21,5 +25,3 @@ def get_provider(name, **kwargs):
         return providers[name](**kwargs)

     raise RuntimeError(f'Invalid provider "{name}"')
-# Ignore W0611: These are meant to be used by external code
-# pylama:ignore=W0611
diff --git a/beeflow/common/cloud/chameleoncloud.py b/beeflow/common/cloud/chameleoncloud.py
index f506af458..54d506ef7 100644
--- a/beeflow/common/cloud/chameleoncloud.py
+++ b/beeflow/common/cloud/chameleoncloud.py
@@ -19,7 +19,7 @@ def create_from_template(self, template_file):
             'Use the Horizon interface instead'
         )

-    def get_ext_ip_addr(self, node_name):  # noqa
+    def get_ext_ip_addr(self, node_name):  # pylint: disable=W0613
         """Get the external IP address of the node, if it has one."""
         if self._stack_name is not None:
             stack = self._api.get_stack(self._stack_name)
diff --git a/beeflow/common/cloud/google.py b/beeflow/common/cloud/google.py
index 2b5f56a40..c8f52ca6e 100644
--- a/beeflow/common/cloud/google.py
+++ b/beeflow/common/cloud/google.py
@@ -19,7 +19,7 @@ def __init__(self, project, zone, **kwargs):

     def get_ext_ip_addr(self, node_name):
         """Get the external IP of this node (or None if no IP)."""
-        res = self._api.instances().get(instance=node_name,  # noqa (can't find instances member)
+        res = self._api.instances().get(instance=node_name,
                                         project=self.project, zone=self.zone).execute()
         try:
@@ -34,7 +34,7 @@ def setup_cloud(self, config):
         # This just creates instances one-by-one. There may be a better API call
         # to just create everything at once.
         for instance in config['instances']:
-            call = self._api.instances().insert(project=self.project,  # noqa (can't find instances member)
+            call = self._api.instances().insert(project=self.project,
                                                 zone=self.zone, body=instance)
             res = call.execute()
             print(res)
diff --git a/beeflow/common/config_driver.py b/beeflow/common/config_driver.py
index bdf4d5249..ae593833c 100644
--- a/beeflow/common/config_driver.py
+++ b/beeflow/common/config_driver.py
@@ -126,7 +126,7 @@ def get(cls, sec_name, opt_name):
         if cls.CONFIG is None:
             cls.init()
         try:
-            return cls.CONFIG[sec_name][opt_name]  # noqa (this object is subscritable)
+            return cls.CONFIG[sec_name][opt_name]  # pylint: disable=E1136  # object is subscriptable
         except KeyError:
             raise RuntimeError(
                 f'Option {sec_name}::{opt_name} was not found. Please contact '
@@ -359,7 +359,7 @@ def validate_chrun_opts(opts):
                  info='scheduling algorithm to use', prompt=False)
 VALIDATOR.option('scheduler', 'default_algorithm', default='fcfs',
                  choices=SCHEDULER_ALGORITHMS, prompt=False,
-                 info=('default algorithm to use'))
+                 info='default algorithm to use')


 def print_wrap(text, next_line_indent=''):
@@ -605,7 +605,3 @@ def show(path: str = typer.Argument(default=USERCONFIG_FILE,
     print(f'# {path}')
     with open(path, encoding='utf-8') as fp:
         print(fp.read(), end='')
-# Ignore C901: "'ConfigGenerator.choose_values' is too complex" - I disagree, if
-#    it's just based on LOC, then there are a number `print()` functions
-#    that are increasing the line count
-# pylama:ignore=C901
diff --git a/beeflow/common/config_utils.py b/beeflow/common/config_utils.py
index eb4073188..3f338df57 100644
--- a/beeflow/common/config_utils.py
+++ b/beeflow/common/config_utils.py
@@ -8,7 +8,7 @@ def filter_and_validate(config, validator):
     """Filter and validate the configuration file."""
     default_keys = list(config['DEFAULT'])
     config = {sec_name: {key: config[sec_name][key] for key in config[sec_name]
-                         if sec_name == 'DEFAULT' or key not in default_keys}  # noqa
+                         if sec_name == 'DEFAULT' or key not in default_keys}
               for sec_name in config}
     # Validate the config
     return validator.validate(config)
diff --git a/beeflow/common/connection.py b/beeflow/common/connection.py
index 2e5ee167f..194761111 100644
--- a/beeflow/common/connection.py
+++ b/beeflow/common/connection.py
@@ -27,7 +27,7 @@ def _full_url(self, path):

     def handle_error(self, resp):
         """Handle an error, if there is one."""
-        if resp.status_code != requests.codes.okay:  # noqa (pylama can't find the okay member)
+        if resp.status_code != requests.codes.okay:  # pylint: disable=E1101  # pylint can't find the okay member
             return self._error_handler(resp)

         return resp
diff --git a/beeflow/common/crt/charliecloud_driver.py b/beeflow/common/crt/charliecloud_driver.py
index c9218d230..db32dcad0 100644
--- a/beeflow/common/crt/charliecloud_driver.py
+++ b/beeflow/common/crt/charliecloud_driver.py
@@ -42,7 +42,7 @@ def get_ccname(image_path):
         name = '.'.join(name)
         return name

-    def run_text(self, task):  # noqa
+    def run_text(self, task):  # pylint: disable=R0915
         """Create text for Charliecloud batch script."""
         os.makedirs(self.container_archive, exist_ok=True)
         log.info(f'Build container archive directory is: {self.container_archive}')
@@ -148,5 +148,5 @@ def run_text(self, task):
     def build_text(self, userconfig, task):
         """Build text for Charliecloud batch script."""
         task_args = task2arg(task)
-        text = (f'beeflow --build {userconfig} {task_args}\n')
+        text = f'beeflow --build {userconfig} {task_args}\n'
         return text
diff --git a/beeflow/common/crt_interface.py b/beeflow/common/crt_interface.py
index 2eb631b31..418fddca7 100644
--- a/beeflow/common/crt_interface.py
+++ b/beeflow/common/crt_interface.py
@@ -5,6 +5,9 @@
 Default: 'CharliecloudDriver' class.
 """

+# Disable module imported but unused error. No way to know which crt will be needed
+# pylint:disable=W0611
+
 from beeflow.common.config_driver import BeeConfig as bc
 from beeflow.common.crt.charliecloud_driver import CharliecloudDriver
 from beeflow.common.crt.singularity_driver import SingularityDriver
@@ -40,5 +43,3 @@ def build_text(self, userconfig, task):
         :rtype: string
         """
         return self._crt_driver.build_text(userconfig, task)
-# Ignore module imported but unused error. No way to know which crt will be needed
-# pylama:ignore=W0611
diff --git a/beeflow/common/db/client_db.py b/beeflow/common/db/client_db.py
index 2443e6ab4..69cd44bdf 100644
--- a/beeflow/common/db/client_db.py
+++ b/beeflow/common/db/client_db.py
@@ -10,7 +10,7 @@ class ClientInfo:

     def __init__(self, db_file):
         """Initialize info and db file."""
-        self.Info = namedtuple("Info", "id hostname")  # noqa Snake Case
+        self.Info = namedtuple("Info", "id hostname")  # pylint: disable=C0103
         self.db_file = db_file

     def set_hostname(self, new_hostname):
diff --git a/beeflow/common/db/tm_db.py b/beeflow/common/db/tm_db.py
index 8e9666dc1..8b82d087a 100644
--- a/beeflow/common/db/tm_db.py
+++ b/beeflow/common/db/tm_db.py
@@ -16,7 +16,7 @@ class SubmitQueue:

     def __init__(self, db_file):
         """Construct a submit queue handler."""
         self.db_file = db_file
-        self.Job = namedtuple("Job", "id task")  #noqa Snake Case
+        self.Job = namedtuple("Job", "id task")  # pylint: disable=C0103

     def __iter__(self):
         """Create an iterator for going over all elements."""
@@ -62,7 +62,7 @@ class JobQueue:

     def __init__(self, db_file):
         """Construct a job queue handler."""
         self.db_file = db_file
-        self.Job = namedtuple("Task", "id task job_id job_state")  # noqa Snake Case
+        self.Job = namedtuple("Task", "id task job_id job_state")  # pylint: disable=C0103

     def __iter__(self):
         """Create an iterator for going over all elements in the queue."""
diff --git a/beeflow/common/db/wfm_db.py b/beeflow/common/db/wfm_db.py
index 63cc87456..db3f5a0f9 100644
--- a/beeflow/common/db/wfm_db.py
+++ b/beeflow/common/db/wfm_db.py
@@ -10,7 +10,11 @@ class WorkflowInfo:

     def __init__(self, db_file):
         """Initialize Info and db file."""
-        self.Info = namedtuple("Info", "id wfm_port tm_port sched_port num_workflows bolt_port http_port https_port gdb_pid")  # noqa Snake Case
+        self.Info = namedtuple(  # pylint: disable=C0103
+            "Info",
+            "id wfm_port tm_port sched_port num_workflows "
+            "bolt_port http_port https_port gdb_pid"
+        )
         self.db_file = db_file

     def set_port(self, component, new_port):
@@ -63,9 +67,9 @@ class Workflows:

     def __init__(self, db_file):
         """Initialize Task, db_file, and Workflow object."""
-        self.Task = namedtuple("Task", "id task_id workflow_id name resource state slurm_id")  #noqa
+        self.Task = namedtuple("Task", "id task_id workflow_id name resource state slurm_id")  # pylint: disable=C0103
         self.db_file = db_file
-        self.Workflow = namedtuple("Workflow", "id workflow_id name state run_dir")  #noqa
+        self.Workflow = namedtuple("Workflow", "id workflow_id name state run_dir")  # pylint: disable=C0103

     def get_workflow(self, workflow_id):
         """Return a workflow object."""
diff --git a/beeflow/common/deps/container_manager.py b/beeflow/common/deps/container_manager.py
index 8d3857844..72abcab47 100755
--- a/beeflow/common/deps/container_manager.py
+++ b/beeflow/common/deps/container_manager.py
@@ -5,10 +5,10 @@
 import os
 import shutil
 import subprocess
+from celery import shared_task  # pylint: disable=W0611  # pylint can't find celery

 from beeflow.common.config_driver import BeeConfig as bc
 from beeflow.common import paths
-from celery import shared_task  #noqa pylama can't find celery


 class NoContainerRuntime(Exception):
diff --git a/beeflow/common/deps/neo4j_manager.py b/beeflow/common/deps/neo4j_manager.py
index b921326f7..fd0ccab2f 100644
--- a/beeflow/common/deps/neo4j_manager.py
+++ b/beeflow/common/deps/neo4j_manager.py
@@ -145,7 +145,7 @@ def create_database():
     """Create the neo4j database and return the process."""
     try:
         command = ['neo4j', 'console']
-        proc = subprocess.Popen([  #noqa can't use with because returning
+        proc = subprocess.Popen([  # pylint: disable=R1732  # can't use with because returning
             "ch-run",
             "--set-env=" + CONTAINER_PATH + "/ch/environment",
             "--set-env=apoc.export.file.enabled=true",
diff --git a/beeflow/common/gdb/neo4j_cypher.py b/beeflow/common/gdb/neo4j_cypher.py
index 9e25bccb4..77d705590 100644
--- a/beeflow/common/gdb/neo4j_cypher.py
+++ b/beeflow/common/gdb/neo4j_cypher.py
@@ -11,7 +11,7 @@ def create_bee_node(tx):

     This node connects to all workflows and allows them to exist in the same graph
     """
-    bee_query = ("MERGE (b:BEE {name:'Head'})")
+    bee_query = "MERGE (b:BEE {name:'Head'})"

     tx.run(bee_query)
diff --git a/beeflow/common/gdb/neo4j_driver.py b/beeflow/common/gdb/neo4j_driver.py
index 5ab01b491..3aa04f393 100644
--- a/beeflow/common/gdb/neo4j_driver.py
+++ b/beeflow/common/gdb/neo4j_driver.py
@@ -5,6 +5,9 @@
 either standardized or read from a config file.
 """

+# Disable E1129: External module is missing proper resource context manager methods.
+# pylint:disable=E1129
+
 from neo4j import GraphDatabase as Neo4jDatabase
 from neo4j.exceptions import ServiceUnavailable

@@ -39,7 +42,7 @@ class Neo4jDriver(GraphDatabaseDriver):
     def __new__(cls):
         """Create or get the instance of Neo4j database driver."""
         if not hasattr(cls, 'instance'):
-            cls.instance = super(Neo4jDriver, cls).__new__(cls)  #noqa cls causing linting errors
+            cls.instance = super(Neo4jDriver, cls).__new__(cls)  # pylint: disable=E1120
         return cls.instance

     def connect(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD, **kwargs):
@@ -58,7 +61,7 @@ def connect(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD, **kwargs):
         uri = f"bolt://{db_hostname}:{bolt_port}"
         try:
             # Connect to the Neo4j database using the Neo4j proprietary driver
-            self._driver = Neo4jDatabase.driver(uri, auth=(user, password))  #noqa outside init
+            self._driver = Neo4jDatabase.driver(uri, auth=(user, password))  # pylint: disable=W0201
             # Checks the connection and returns ServiceUnavailable if something is wrong
             self._driver.verify_connectivity()
         except ServiceUnavailable as sue:
@@ -625,6 +628,3 @@ def _reconstruct_metadata(metadata_record):
     :rtype: dict
     """
     return {key: val for key, val in metadata_record.items() if key != "state"}
-
-# Ignore E1129: External module is missing proper resource context manager methods.
-# pylama:ignore=E1129
diff --git a/beeflow/common/integration_test.py b/beeflow/common/integration_test.py
index f26cb96bd..0feb5d9c7 100644
--- a/beeflow/common/integration_test.py
+++ b/beeflow/common/integration_test.py
@@ -1,4 +1,9 @@
 """BEE integration tests."""
+
+# Disable W0231: This is a user-defined exception and I don't think we need to call
+# __init__ on the base class.
+# pylint:disable=W0231
+
 import glob
 from pathlib import Path
 import os
@@ -286,10 +291,10 @@ def test_input_callback(arg):
     return arg.split(',') if arg is not None else None


-def main(tests = typer.Option(None, '--tests', '-t',  # noqa (conflict on '=' sign)
+def main(tests = typer.Option(None, '--tests', '-t',
                               callback=test_input_callback,
                               help='tests run as comma-separated string'),
-         show_tests: bool = typer.Option(False, '--show-tests', '-s',  # noqa (conflict on '=' sign)
+         show_tests: bool = typer.Option(False, '--show-tests', '-s',
                                          help='show a list of all tests'),
          timeout: int = typer.Option(utils.TIMEOUT, '--timeout',
                                      help='workflow timeout in seconds')):
@@ -308,6 +313,3 @@ def main(tests = typer.Option(None, '--tests', '-t',
     # General clean up
     os.remove(generated_workflows.DOCKER_FILE_PATH)
     sys.exit(ret)
-# Ignore W0231: This is a user-defined exception and I don't think we need to call
-#    __init__ on the base class.
-# pylama:ignore=W0231
diff --git a/beeflow/common/parser/__init__.py b/beeflow/common/parser/__init__.py
index 2b85aecf3..f3d8132da 100644
--- a/beeflow/common/parser/__init__.py
+++ b/beeflow/common/parser/__init__.py
@@ -1,3 +1,3 @@
 """Init code for parser."""

-from beeflow.common.parser.parser import CwlParser, CwlParseError  # noqa
+from beeflow.common.parser.parser import CwlParser, CwlParseError
diff --git a/beeflow/common/parser/parser.py b/beeflow/common/parser/parser.py
index 840a7d3d2..64bc9a3ef 100644
--- a/beeflow/common/parser/parser.py
+++ b/beeflow/common/parser/parser.py
@@ -12,7 +12,7 @@
 import traceback
 import yaml
 import cwl_utils.parser.cwl_v1_2 as cwl_parser
-from schema_salad.exceptions import ValidationException  # noqa (pylama can't find the exception)
+from schema_salad.exceptions import ValidationException

 from beeflow.common.wf_data import (Workflow,
                                     Task,
@@ -352,13 +352,13 @@ def parse_requirements(self, requirements, as_hints=False):
                 if 'shell' in items:
                     self._read_requirement_file('pre_script', items)
                 else:
-                    msg = f'pre script enabled but shell option undefined in cwl file.'  #noqa
+                    msg = f'pre script enabled but shell option undefined in cwl file.'  # pylint: disable=W1309
                     raise CwlParseError(msg) from None
             if 'post_script' in items and items['enabled']:
                 if 'shell' in items:
                     self._read_requirement_file('post_script', items)
                 else:
-                    msg = f'post script enabled but shell option undefined in cwl file.'  #noqa
+                    msg = f'post script enabled but shell option undefined in cwl file.'  # pylint: disable=W1309
                     raise CwlParseError(msg) from None
             if 'beeflow:bindMounts' in items:
                 self._read_requirement_file('beeflow:bindMounts', items)
diff --git a/beeflow/common/wf_data.py b/beeflow/common/wf_data.py
index 1777c339f..f83c6d3db 100644
--- a/beeflow/common/wf_data.py
+++ b/beeflow/common/wf_data.py
@@ -1,4 +1,5 @@
 """Defines data structures for holding task and workflow data."""
+
 from collections import namedtuple
 from uuid import uuid4
 from copy import deepcopy
@@ -332,7 +333,3 @@ def command(self):
                 break

         return command
-# Ignore C901: "'Task.command' is too complex" - right now this function is
-#    under 50 lines of code. If we add any more lines I think it
-#    might be best to break it up, but for now it seems fine.
-# pylama:ignore=C901
diff --git a/beeflow/common/worker/__init__.py b/beeflow/common/worker/__init__.py
index 4afc064d1..683f09256 100644
--- a/beeflow/common/worker/__init__.py
+++ b/beeflow/common/worker/__init__.py
@@ -1,6 +1,6 @@
 """Init file for the worker package."""

-from beeflow.common.worker.worker import WorkerError  # noqa: this is imported for external code
+from beeflow.common.worker.worker import WorkerError
 from beeflow.common.worker.slurm_worker import SlurmWorker
 from beeflow.common.worker.lsf_worker import LSFWorker
 from beeflow.common.worker.flux_worker import FluxWorker
diff --git a/beeflow/common/worker/flux_worker.py b/beeflow/common/worker/flux_worker.py
index 98624f052..1c9a19ba7 100644
--- a/beeflow/common/worker/flux_worker.py
+++ b/beeflow/common/worker/flux_worker.py
@@ -1,5 +1,9 @@
 """Flux worker interface."""

+# Disable W0511: TODO's are needed here to indicate parts of the code that may
+# need more work or thought
+# pylint:disable=W0511
+
 import io
 import os
 from beeflow.common import log as bee_logging
@@ -30,8 +34,8 @@ def __init__(self, **kwargs):
         """Initialize the flux worker object."""
         super().__init__(**kwargs)
         # Only try to import the Flux API if we need it
-        import flux  # noqa this is necessary since flux may not be installed
-        from flux import job  # noqa
+        import flux  # pylint: disable=C0415  # this is necessary since flux may not be installed
+        from flux import job  # pylint: disable=C0415
         self.flux = flux
         self.job = job

@@ -149,6 +153,3 @@ def query_task(self, job_id):

         # Note: using 'status' here instead of 'state'
         return BEE_STATES[info['status']]
-# Ignoring W0511: TODO's are needed here to indicate parts of the code that may
-#    need more work or thought
-# pylama:ignore=W0511
diff --git a/beeflow/common/worker/slurm_worker.py b/beeflow/common/worker/slurm_worker.py
index 83ab7c1eb..ef47ebac7 100644
--- a/beeflow/common/worker/slurm_worker.py
+++ b/beeflow/common/worker/slurm_worker.py
@@ -137,7 +137,7 @@ def write_script(self, task):

     def submit_job(self, script):
         """Worker submits job-returns (job_id, job_state)."""
-        res = subprocess.run(['sbatch', '--parsable', script], text=True,  # noqa if we use check=True here, then we can't see stderr
+        res = subprocess.run(['sbatch', '--parsable', script], text=True,  # pylint: disable=W1510
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         if res.returncode != 0:
             raise WorkerError(f'Failed to submit job: {res.stderr}')
@@ -202,7 +202,7 @@ def cancel_task(self, job_id):
         try:
             data = resp.json()
             check_slurm_error(data, errmsg)
-        except requests.exceptions.JSONDecodeError as exc:  # noqa requests is not installed in CI
+        except requests.exceptions.JSONDecodeError as exc:
             raise WorkerError(errmsg) from exc
         job_state = "CANCELLED"
         return job_state
diff --git a/beeflow/common/worker/worker.py b/beeflow/common/worker/worker.py
index aa89de7ca..09d9cb8b9 100644
--- a/beeflow/common/worker/worker.py
+++ b/beeflow/common/worker/worker.py
@@ -1,5 +1,8 @@
 """Abstract base class for worker, the workload manager."""

+# Disable W0511: This allows us to have TODOs in the code
+# pylint:disable=W0511
+
 from abc import ABC, abstractmethod
 import os
 from beeflow.common import log as bee_logging
@@ -93,6 +96,3 @@ def query_task(self, job_id):
     :type job_id: int
     :rtype: string
     """
-
-# Ignore W0511: This allows us to have TODOs in the code
-# pylama:ignore=W0511
diff --git a/beeflow/common/worker_interface.py b/beeflow/common/worker_interface.py
index bcc7e532c..f10437ad0 100644
--- a/beeflow/common/worker_interface.py
+++ b/beeflow/common/worker_interface.py
@@ -4,6 +4,9 @@
 the abstract base class 'Worker'.
 Default: 'SlurmWorker' class.
 """

+# Disable W0611 module imported but unused error; unsure which workload scheduler will be needed
+# pylint:disable=W0611
+
 from beeflow.common.worker.slurm_worker import SlurmWorker
 from beeflow.common.worker.lsf_worker import LSFWorker

@@ -50,5 +53,3 @@ def query_task(self, job_id):
         :rtype: tuple (int, string)
         """
         return self._worker.query_task(job_id)
-# Ignore W0611 module imported but unused error; unsure which workload scheduler will be needed
-# pylama:ignore=W0611
diff --git a/beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.py b/beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.py
index c1fc92a56..98ee4c5b2 100644
--- a/beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.py
+++ b/beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.py
@@ -1,4 +1,8 @@
 """Graph the output of a PENNANT workflow."""
+
+# Disable C0103: This is just a simple script, not all globals should be UPPER_CASE here
+# pylint:disable=C0103
+
 import re
 import sys
 import matplotlib.pyplot as plt
@@ -38,6 +42,3 @@
 ax.set_ylabel('Average wall time for cycle')
 # Save to a png file
 fig.savefig('graph.png')
-
-# Ignore C0103: This is just a simple script, not all globals should be UPPER_CASE here
-# pylama:ignore=C0103
diff --git a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree.py b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree.py
index 144f32e07..4c30deafc 100644
--- a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree.py
+++ b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree.py
@@ -1,4 +1,8 @@
 """Decision Tree."""
+
+# Disable preserving code for now
+# pylint:disable=C0103,R1732,W0612,W0621
+
 # import json
 import pickle
 import click
@@ -32,5 +36,3 @@ def reg(x1):

 if __name__ == '__main__':
     reg(x1=1)
-# Ignores preserving code for now
-# pylama:ignore=C0103,R1732,W0612,W0621
diff --git a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regression.py b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regression.py
index 4cae57e02..c4d7aae73 100644
--- a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regression.py
+++ b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regression.py
@@ -1,4 +1,8 @@
 """Linear Regression."""
+
+# Disable preserving code for now
+# pylint:disable=C0103,R1732,W0612,W0621
+
 import json
 import pickle
 import click
@@ -41,5 +45,3 @@ def reg(x1):

 if __name__ == '__main__':
     reg(x1=1)
-# Ignores preserving code for now
-# pylama:ignore=C0103,R1732,W0612,W0621
diff --git a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_code.py b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_code.py
index e93c61b25..c7f0af0b5 100644
--- a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_code.py
+++ b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_code.py
@@ -1,4 +1,8 @@
 """Predict Code."""
+
+# Disable preserving code for now
+# pylint:disable=C0103,R1732
+
 import json
 import pickle
 import click
@@ -37,5 +41,3 @@ def pred(e, i, t):

 if __name__ == '__main__':
     pred(e=4, i=5, t=6)
-# Ignores preserving code for now
-# pylama:ignore=C0103,R1732,C0501
diff --git a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset.py b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset.py
index 06a8c9f9c..39731694e 100644
--- a/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset.py
+++ b/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset.py
@@ -1,4 +1,8 @@
 """Read Data Set."""
+
+# Disables preserving code for now
+# pylint:disable=C0103,R1732,W0612
+
 import pickle
 import click
 # import json
@@ -32,5 +36,3 @@ def reader(y3):

 if __name__ == '__main__':
     reader(y3="")
-# Ignores preserving code for now
-# pylama:ignore=C0103,R1732,W0612
diff --git a/beeflow/data/dockerfiles/pennant-graph/graph_pennant.py b/beeflow/data/dockerfiles/pennant-graph/graph_pennant.py
index c1fc92a56..98ee4c5b2 100644
--- a/beeflow/data/dockerfiles/pennant-graph/graph_pennant.py
+++ b/beeflow/data/dockerfiles/pennant-graph/graph_pennant.py
@@ -1,4 +1,8 @@
 """Graph the output of a PENNANT workflow."""
+
+# Disable C0103: This is just a simple script, not all globals should be UPPER_CASE here
+# pylint:disable=C0103
+
 import re
 import sys
 import matplotlib.pyplot as plt
@@ -38,6 +42,3 @@
 ax.set_ylabel('Average wall time for cycle')
 # Save to a png file
 fig.savefig('graph.png')
-
-# Ignore C0103: This is just a simple script, not all globals should be UPPER_CASE here
-# pylama:ignore=C0103
diff --git a/beeflow/scheduler/algorithms.py b/beeflow/scheduler/algorithms.py
index 5c7a09b1c..6099a6583 100644
--- a/beeflow/scheduler/algorithms.py
+++ b/beeflow/scheduler/algorithms.py
@@ -3,6 +3,9 @@
 Code implementing scheduling algorithms, such as FCFS, Backfill, etc.
 """

+# Disable W0511: This allows us to have TODOs in the code
+# pylint:disable=W0511
+
 import abc
 import os
 import time
@@ -238,7 +241,7 @@ def schedule_all(self, tasks, resources):
 }


-def load(algorithm=None, **kwargs):  # noqa ('algorithm' may be used in the future)
+def load(algorithm=None, **kwargs):  # pylint: disable=W0613  # 'algorithm' may be used in the future
     """Load data needed by the algorithms.

     Load data needed by algorithms, if necessary.
@@ -262,6 +265,3 @@ def choose(algorithm=None, default_algorithm=None, **kwargs):
     if algorithm is not None:
         cls = algorithm_objects[algorithm]
     return AlgorithmLogWrapper(cls, **kwargs)
-
-# Ignore W0511: This allows us to have TODOs in the code
-# pylama:ignore=W0511
diff --git a/beeflow/scheduler/resource_allocation.py b/beeflow/scheduler/resource_allocation.py
index 80d586c43..87cf639c7 100644
--- a/beeflow/scheduler/resource_allocation.py
+++ b/beeflow/scheduler/resource_allocation.py
@@ -1,4 +1,8 @@
 """Task allocator code."""
+
+# Disable W0511: This allows us to have TODOs in the code
+# pylint:disable=W0511
+
 from beeflow.scheduler import serializable

@@ -259,6 +263,3 @@ def decode(data):
     :type data: dict
     """
     return Allocation(**data)
-
-# Ignore W0511: This allows us to have TODOs in the code
-# pylama:ignore=W0511
diff --git a/beeflow/scheduler/scheduler.py b/beeflow/scheduler/scheduler.py
index 0e64c390e..0f377c825 100644
--- a/beeflow/scheduler/scheduler.py
+++ b/beeflow/scheduler/scheduler.py
@@ -1,6 +1,9 @@
 #!/usr/bin/env python3
 """REST Interface for the BEE Scheduler."""

+# Disable W0511: This allows us to have TODOs in the code
+# pylint:disable=W0511
+
 import argparse
 import os

@@ -49,7 +52,7 @@ class WorkflowJobHandler(Resource):
     """Schedule jobs for a specific workflow with the current resources."""

     @staticmethod
-    def put(workflow_name):  # noqa ('workflow_name' may be used in the future)
+    def put(workflow_name):  # pylint: disable=W0613  # 'workflow_name' may be used in the future
         """Schedules a new list of independent tasks with available resources."""
         db = connect_db(sched_db, db_path)
         data = request.json
@@ -93,10 +96,10 @@ def load_config_values():
     conf = argparse.Namespace(**conf)

     log.info('Config = [')
-    log.info(f'\talloc_logfile = {conf.alloc_logfile}')  # noqa pylama is wrong here
+    log.info(f'\talloc_logfile = {conf.alloc_logfile}')  # pylint: disable=E1101
     log.info(f'\talgorithm = {conf.algorithm}')
     log.info(f'\tdefault_algorithm = {conf.default_algorithm}')
-    log.info(f'\tworkdir = {conf.workdir}')  # noqa
+    log.info(f'\tworkdir = {conf.workdir}')  # pylint: disable=E1101  # pylint is wrong here
     log.info(']')
     return conf

@@ -112,8 +115,5 @@ def create_app():
     # Create the scheduler workdir, if necessary
     # sched_listen_port = wf_utils.get_open_port()
     # wf_db.set_sched_port(sched_listen_port)
-    os.makedirs(conf.workdir, exist_ok=True)  # noqa
+    os.makedirs(conf.workdir, exist_ok=True)  # pylint: disable=E1101
     return flask_app
-
-# Ignore W0511: This allows us to have TODOs in the code
-# pylama:ignore=W0511
diff --git a/beeflow/task_manager/background.py b/beeflow/task_manager/background.py
index 58da80036..5a6a175d8 100644
--- a/beeflow/task_manager/background.py
+++ b/beeflow/task_manager/background.py
@@ -42,7 +42,7 @@ def submit_task(db, worker, task):
         job_state = 'BUILD_FAIL'
         log.error(f'Failed to build container for {task.name}: {err}')
         log.error(f'{task.name} state: {job_state}')
-    except Exception as err:  # noqa (we have to catch everything here)
+    except Exception as err:  # pylint: disable=W0718  # we have to catch everything here
         # Set job state to failed
         job_state = 'SUBMIT_FAIL'
         log.error(f'Task Manager submit task {task.name} failed! \n {err}')
diff --git a/beeflow/task_manager/task_actions.py b/beeflow/task_manager/task_actions.py
index 505f158c0..8b0c4635c 100644
--- a/beeflow/task_manager/task_actions.py
+++ b/beeflow/task_manager/task_actions.py
@@ -24,7 +24,7 @@ def delete():
         log.info(f"Cancelling {name} with job_id: {job_id}")
         try:
             job_state = worker.cancel_task(job_id)
-        except Exception as err:  # noqa (we have to catch everything here)
+        except Exception as err:  # pylint: disable=W0718  # we have to catch everything here
             log.error(err)
             log.error(traceback.format_exc())
             job_state = 'ZOMBIE'
diff --git a/beeflow/tests/mocks.py b/beeflow/tests/mocks.py
index 6643f3975..49586350e 100644
--- a/beeflow/tests/mocks.py
+++ b/beeflow/tests/mocks.py
@@ -32,11 +32,11 @@ def resume_workflow(self):
         """Resume a workflow."""
         return

-    def reset_workflow(self, wf_id):  #noqa
+    def reset_workflow(self, wf_id):  # pylint: disable=W0613  # not using wf_id in mock
         """Reset a workflow."""
-        wf_id = 0  # noqa
+        wf_id = 0

-    def get_dependent_tasks(self, task):  # noqa
+    def get_dependent_tasks(self, task):  # pylint: disable=W0613
         """Get depdendent states."""
         return [MockTask()]

@@ -48,7 +48,7 @@ def set_task_metadata(self, task, metadata):
         """Set the metadata for this task."""
         task.metadata = metadata

-    def get_task_by_id(self, task_id):  # noqa
+    def get_task_by_id(self, task_id):  # pylint: disable=W0613
         """Return a mock task from an ID."""
         return MockTask()

@@ -68,7 +68,7 @@ def get_ready_tasks(self):

     def workflow_initialized(self):
         """Fake that the workflow has been initialized."""

-    def set_task_state(self, task, job_state):  # noqa
+    def set_task_state(self, task, job_state):
         """Set the state of a task."""
         task.state = job_state

@@ -76,7 +76,7 @@ def workflow_loaded(self):
         """Fake workflow being loaded."""
         return self._loaded

-    def initialize_workflow(self, wf_id, wf_name, inputs, outputs, req=None, hints=None):  # noqa
+    def initialize_workflow(self, wf_id, wf_name, inputs, outputs, req=None, hints=None):  # pylint: disable=W0613
         """Initialize the workflow."""
         self._loaded = True

@@ -97,17 +97,17 @@ def get_workflow(self):
         """Get a list of workflows."""
         return None, [MockTask("task1"), MockTask("task2")]

-    def get_task_state(self, task_name):  # noqa
+    def get_task_state(self, task_name):  # pylint: disable=W0613
         """Returns the task state."""
         return "RUNNING"

     def execute_workflow(self):
         """Fake executing a workflow."""
-        pass  # noqa
+        pass  # pylint: disable=W0107

     def export_graphml(self):
         """Fake exporting a graphml of a workflow."""
-        pass  # noqa
+        pass  # pylint: disable=W0107


 class MockGDBDriver:
@@ -146,22 +146,22 @@ def _is_ready(self, task_id):
         return all(self.task_states[task_dep_id] == 'COMPLETED'
                    for task_dep_id in task_deps)

-    def execute_workflow(self, workflow_id):  #noqa not using parameter in mock
+    def execute_workflow(self, workflow_id):  # pylint: disable=W0613  # not using param in mock
         """Begin execution of the loaded workflow."""
         self.workflow_state = 'RUNNING'
         for task_id in self.task_states:
             if self._is_ready(task_id):
                 self.task_states[task_id] = 'READY'

-    def pause_workflow(self, workflow_id):  #noqa not using parameter in mock
+    def pause_workflow(self, workflow_id):  # pylint: disable=W0613
         """Pause execution of a running workflow."""
         self.workflow_state = 'PAUSED'

-    def resume_workflow(self, workflow_id):  #noqa not using parameter in mock
+    def resume_workflow(self, workflow_id):  # pylint: disable=W0613
         """Resume execution of a running workflow."""
         self.workflow_state = 'RESUME'

-    def reset_workflow(self, old_id, new_id):  #noqa not using parameter in mock
+    def reset_workflow(self, old_id, new_id):  # pylint: disable=W0613
         """Reset the execution state and ID of a workflow."""
         self.workflow = deepcopy(self.workflow)
         self.workflow.id = new_id
@@ -182,7 +182,7 @@ def load_task(self, task, task_state):
         for outp in task.outputs:
             self.outputs[task.id][outp.id] = outp

-    def initialize_ready_tasks(self, workflow_id):  #noqa not using parameter in mock
+    def initialize_ready_tasks(self, workflow_id):  # pylint: disable=W0613
         """Set runnable tasks in a workflow to ready."""
         for task_id in self.tasks:
             if self._is_ready(task_id) and self.task_states[task_id] == 'WAITING':
@@ -201,27 +201,27 @@ def get_task_by_id(self, task_id):
         """Return a workflow Task given its ID."""
         return self.tasks[task_id]

-    def get_workflow_description(self, workflow_id):  #noqa not using parameter in mock
+    def get_workflow_description(self, workflow_id):  # pylint: disable=W0613
         """Return the workflow description from the graph database."""
         return deepcopy(self.workflow)

-    def get_workflow_state(self, workflow_id):  #noqa not using parameter in mock
+    def get_workflow_state(self, workflow_id):  # pylint: disable=W0613
         """Return workflow's current state."""
         return self.workflow_state

-    def set_workflow_state(self, workflow_id, state):  #noqa not using parameter in mock
+    def set_workflow_state(self, workflow_id, state):  # pylint: disable=W0613
         """Return workflow's current state."""
         self.workflow_state = state

-    def get_workflow_tasks(self, workflow_id):  #noqa not using parameter in mock
+    def get_workflow_tasks(self, workflow_id):  # pylint: disable=W0613
         """Return a workflow's tasks from the graph database."""
         return list(self.tasks.values())

-    def get_workflow_requirements_and_hints(self, workflow_id):  #noqa not using parameter in mock
+    def get_workflow_requirements_and_hints(self, workflow_id):  # pylint: disable=W0613
         """Return a tuple containing a list of requirements and a list of hints."""
         return (None, None)

-    def get_ready_tasks(self, workflow_id):  #noqa not using parameter in mock
+    def get_ready_tasks(self, workflow_id):  # pylint: disable=W0613
         """Return the tasks in a workflow with state 'READY'."""
         return [task for task_id, task in self.tasks.items()
                 if self.task_states[task_id] == 'READY']
@@ -254,7 +254,7 @@ def get_task_input(self, task, input_id):
         """Get a task input object."""
         inp = self.inputs[task.id][input_id]
         try:
-            inp.id  # noqa (trying to get an AttributeError here)
+            inp.id  # pylint: disable=W0104  # trying to get an AttributeError here
             return inp
         except AttributeError:
             return StepInput(input_id, 'File', inp,
@@ -293,7 +293,7 @@ def evaluate_expression(self, task, id_, output):
                              step_inp.position, step_inp.value_from)

-    def workflow_completed(self, workflow_id):  #noqa not using parameter in mock
+    def workflow_completed(self, workflow_id):  # pylint: disable=W0613
         """Return true if all of a workflow's final tasks have completed, else false."""
         return all(state == 'COMPLETED'
                    for state in self.task_states.values())

@@ -301,9 +301,9 @@ def close(self):
         """Close the connection to the graph database."""


-def mock_create_image():  # noqa
+def mock_create_image():
     """Fake image creation."""
-    pass  # noqa
+    pass  # pylint: disable=W0107


 class MockCwlParser:
@@ -313,7 +313,7 @@ def __init__(self, bolt_port):
         """Need a port."""
         self.bolt_port = bolt_port

-    def parse_workflow(self, wf_id, cwl_path, yaml_file=None):  # noqa
+    def parse_workflow(self, wf_id, cwl_path, yaml_file=None):  # pylint: disable=W0613
         """Parse the workflow."""
         return MockWFI()

@@ -321,15 +321,15 @@ def parse_workflow(self, wf_id, cwl_path, yaml_file=None):

 class MockWorkerSubmission:
     """Mock Worker during submission."""

-    def submit_task(self, task):  # noqa
+    def submit_task(self, task):  # pylint: disable=W0613
         """Return submission."""
         return 1, 'PENDING'

-    def query_task(self, job_id):  #noqa
+    def query_task(self, job_id):  # pylint: disable=W0613
         """Return state of task."""
         return 'RUNNING'

-    def cancel_task(self, job_id):  # noqa
+    def cancel_task(self, job_id):  # pylint: disable=W0613
         """Return cancelled status"""
         return 'CANCELLED'

@@ -337,15 +337,15 @@ def cancel_task(self, job_id):

 class MockWorkerCompletion:
     """Mock Worker after completion."""

-    def submit_task(self, task):  #noqa
+    def submit_task(self, task):  # pylint: disable=W0613
         """Submit a task."""
         return 1, 'PENDING'

-    def query_task(self, job_id):  #noqa
+    def query_task(self, job_id):  # pylint: disable=W0613
         """Submit a task."""
         return 'COMPLETED'

-    def cancel_task(self, job_id):  #noqa
+    def cancel_task(self, job_id):  # pylint: disable=W0613
         """Cancel a task."""
         return 'CANCELLED'

@@ -363,21 +363,21 @@ def json():
         return '{}'


-def mock_put(url, params=None, **kwargs):  # noqa
+def mock_put(url, params=None, **kwargs):  # pylint: disable=W0613
     """Fake put."""
     return MockResponse(200)


-def mock_post(url, params=None, **kwargs):  # noqa
+def mock_post(url, params=None, **kwargs):  # pylint: disable=W0613
     """Fake post."""
     return MockResponse(200)


-def mock_get(url, params=None, **kwargs):  # noqa
+def mock_get(url, params=None, **kwargs):  # pylint: disable=W0613
     """Fake get."""
     return MockResponse(200)


-def mock_delete(url, params=None, **kwargs):  # noqa
+def mock_delete(url, params=None, **kwargs):  # pylint: disable=W0613
     """Fake delete."""
     return MockResponse(200)
diff --git a/beeflow/tests/test_config_validator.py b/beeflow/tests/test_config_validator.py
index 32677d3c1..9fa703513 100644
--- a/beeflow/tests/test_config_validator.py
+++ b/beeflow/tests/test_config_validator.py
@@ -6,10 +6,10 @@ def test_empty():
     """Test an empty config."""
     validator = ConfigValidator(description='empty test case')
-    assert validator.validate({}) == {}  # noqa (suggestion is wrong for this case)
+    assert validator.validate({}) == {}
     # Invalid sections and options should just print a warning rather than fail
-    assert validator.validate({'bad_section': {}}) == {}  # noqa (suggestion is wrong for this case)
-    assert validator.validate({'bad_section': {'bad_option'}}) == {}  # noqa (suggestion is wrong for this case)
+    assert validator.validate({'bad_section': {}}) == {}
+    assert validator.validate({'bad_section': {'bad_option'}}) == {}  # pylint: disable=C1803


 def test_two():
@@ -40,7 +40,7 @@ def test_choices():
     validator.option('section0', 'choice-key', choices=('A', 'B', 'C'),
                      info='choice-based option')
     assert (validator.validate({'section0': {'choice-key': 'B'}})
-            == {'section0': {'choice-key': 'B'}})  # noqa
+            == {'section0': {'choice-key': 'B'}})
     with pytest.raises(ConfigError):
         assert validator.validate({'section0': {'choice-key': 'E'}})

@@ -59,7 +59,7 @@ def test_depends_on():
     with pytest.raises(ConfigError):
         validator.validate({'one': {'key': 'A'}})
     assert (validator.validate({'one': {'key': 'A'}, 'two': {'some-key': '123'}})
-            == {'one': {'key': 'A'}, 'two': {'some-key': '123'}})  # noqa
+            == {'one': {'key': 'A'}, 'two': {'some-key': '123'}})


 def test_depends_on_order():
diff --git a/beeflow/tests/test_db_client.py b/beeflow/tests/test_db_client.py
index e6e6e2516..59574b4d6 100644
--- a/beeflow/tests/test_db_client.py
+++ b/beeflow/tests/test_db_client.py
@@ -1,4 +1,9 @@
 """Tests of the client database."""
+
+# Disable W0621: Pylint complains about redefining 'temp_db' from the outer
+# scope. This is how pytest fixtures work.
+# pylint:disable=W0621
+
 import tempfile
 import os

@@ -38,6 +43,3 @@ def test_info(temp_db):

     assert host_name == 'front_end_name'
     assert backend_stat == 'true'
-# Ignore W0621: PyLama complains about redefining 'temp_db' from the outer
-#    scope. This is how pytest fixtures work.
-# pylama:ignore=W0621
diff --git a/beeflow/tests/test_db_sched.py b/beeflow/tests/test_db_sched.py
index 16db1e93d..5342e66de 100644
--- a/beeflow/tests/test_db_sched.py
+++ b/beeflow/tests/test_db_sched.py
@@ -1,4 +1,9 @@
 """Tests of the scheduler database."""
+
+# Disable W0621: Pylint complains about redefining 'temp_db' from the outer
+# scope. This is how pytest fixtures work.
+# pylint:disable=W0621
+
 import tempfile
 import os

@@ -38,6 +43,3 @@ def test_clear(temp_db):

     db.resources.clear()
     assert len(list(db.resources)) == 0
-# Ignore W0621: PyLama complains about redefining 'temp_db' from the outer
-#    scope. This is how pytest fixtures work.
-# pylama:ignore=W0621
diff --git a/beeflow/tests/test_db_tm.py b/beeflow/tests/test_db_tm.py
index 467ca77f2..2d99c0626 100644
--- a/beeflow/tests/test_db_tm.py
+++ b/beeflow/tests/test_db_tm.py
@@ -1,4 +1,9 @@
 """Tests of the TM database."""
+
+# Disable W0621: Pylint complains about redefining 'temp_db' from the outer
+# scope. This is how pytest fixtures work.
+# pylint:disable=W0621
+
 import tempfile
 import os

@@ -183,6 +188,3 @@ def test_update_queue_order(temp_db):

     db.update_queue.clear()
     assert db.update_queue.updates() == []
-# Ignore W0621: PyLama complains about redefining 'temp_db' from the outer
-#    scope. This is how pytest fixtures work.
-# pylama:ignore=W0621
diff --git a/beeflow/tests/test_parser.py b/beeflow/tests/test_parser.py
index 2ba641770..843d19add 100644
--- a/beeflow/tests/test_parser.py
+++ b/beeflow/tests/test_parser.py
@@ -38,8 +38,8 @@ def test_parse_workflow_yaml(self):

     def test_parse_workflow_script(self):
         """Test parsing of workflow with a YAML input job file."""
-        cwl_wf_file = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_wf.cwl")  #noqa
-        cwl_job_yaml = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_job.yml")  #noqa
+        cwl_wf_file = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_wf.cwl")  # pylint: disable=C0301
+        cwl_job_yaml = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_job.yml")  # pylint: disable=C0301

         workflow_id = generate_workflow_id()

@@ -52,8 +52,8 @@ def test_parse_workflow_script(self):

     def test_parse_workflow_validate_script(self):
         """Test parsing of workflow and validate pre/post script files."""
-        cwl_wf_file = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_wf.cwl")  #noqa
-        cwl_job_yaml = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_job.yml")  #noqa
+        cwl_wf_file = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_wf.cwl")  # pylint: disable=C0301
+        cwl_job_yaml = find("beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_job.yml")  # pylint: disable=C0301

         workflow_id = generate_workflow_id()

@@ -64,15 +64,15 @@ def test_parse_workflow_validate_script(self):

     def test_parse_workflow_validate_shell(self):
         """Test parsing of workflow and check shell option matches pre/post script shebang line."""
-        cwl_wf_file = find("ci/test_workflows/shell_validate/workflow.cwl")  #noqa
-        cwl_job_yaml = find("ci/test_workflows/shell_validate/input.yml")  #noqa
+        cwl_wf_file = find("ci/test_workflows/shell_validate/workflow.cwl")
+        cwl_job_yaml = find("ci/test_workflows/shell_validate/input.yml")

         workflow_id = generate_workflow_id()

         with self.assertRaises(Exception) as context:
             self.parser.parse_workflow(workflow_id, cwl_wf_file, cwl_job_yaml)

-        self.assertEqual(context.exception.args[0], "CWL file shell #!/bin/bash does not match post.sh shell #!/bin/bashoo")  #noqa
+        self.assertEqual(context.exception.args[0], "CWL file shell #!/bin/bash does not match post.sh shell #!/bin/bashoo")  # pylint: disable=C0301

     def test_parse_workflow_json(self):
         """Test parsing of workflow with a JSON input job file."""
@@ -164,8 +164,8 @@ def test_parse_workflow_missing_input(self):
         Task(
             name='clamr',
             base_command='/CLAMR/clamr_cpuonly',
-            hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'}), #noqa
-                   Hint(class_='beeflow:ScriptRequirement', params={'enabled': True, 'pre_script': 'echo "Before run"', 'post_script': 'echo "After run"', 'shell': '/bin/bash'})], #noqa
+            hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'}),  # pylint: disable=C0301
+                   Hint(class_='beeflow:ScriptRequirement', params={'enabled': True, 'pre_script': 'echo "Before run"', 'post_script': 'echo "After run"', 'shell': '/bin/bash'})],  # pylint: disable=C0301
             requirements=[],
             inputs=[StepInput(id='graphic_steps', type='int', value=None, default=None,
                               source='steps_between_graphics', prefix='-g', position=None,
@@ -194,7 +194,7 @@ def test_parse_workflow_missing_input(self):
         Task(
             name='ffmpeg',
             base_command='ffmpeg -y',
-            hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})],  # noqa
+            hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})],  # pylint: disable=C0301
             requirements=[],
             inputs=[StepInput(id='ffmpeg_input', type='Directory', value=None, default=None,
                               source='clamr/outdir', prefix='-i', position=2,
@@ -223,8 +223,8 @@ def test_parse_workflow_missing_input(self):
         Task(
             name='clamr',
             base_command='/CLAMR/clamr_cpuonly',
&& make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'}), #noqa - Hint(class_='beeflow:ScriptRequirement', params={'enabled': True, 'pre_script': 'echo "Before run"\n', 'post_script': 'echo "After run"\n'})], #noqa + hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'}), # pylint: disable=C0301 + Hint(class_='beeflow:ScriptRequirement', params={'enabled': True, 'pre_script': 'echo "Before run"\n', 'post_script': 'echo "After run"\n'})], # pylint: disable=C0301 requirements=[], inputs=[StepInput(id='graphic_steps', type='int', value=None, default=None, source='steps_between_graphics', prefix='-g', position=None, @@ -253,7 +253,7 @@ def test_parse_workflow_missing_input(self): Task( name='ffmpeg', base_command='ffmpeg -y', - hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # noqa + hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # pylint: disable=C0301 requirements=[], inputs=[StepInput(id='ffmpeg_input', type='Directory', value=None, default=None, source='clamr/outdir', prefix='-i', position=2, @@ -282,7 +282,7 @@ def test_parse_workflow_missing_input(self): Task( name='clamr', base_command='/CLAMR/clamr_cpuonly', - hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # noqa + hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . 
&& make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # pylint: disable=C0301 requirements=[], inputs=[StepInput(id='graphic_steps', type='int', value=None, default=None, source='steps_between_graphics', prefix='-g', position=None, @@ -311,7 +311,7 @@ def test_parse_workflow_missing_input(self): Task( name='ffmpeg', base_command='ffmpeg -y', - hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # noqa + hints=[Hint(class_='DockerRequirement', params={'dockerFile': '# Dockerfile.clamr-ffmpeg\n# Developed on Chicoma @lanl\n# Patricia Grubel \n\nFROM debian:11\n\n\nRUN apt-get update && \\\n apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev\n\nRUN git clone https://github.com/lanl/CLAMR.git\nRUN cd CLAMR && cmake . && make clamr_cpuonly\n', 'beeflow:containerName': 'clamr-ffmpeg'})], # pylint: disable=C0301 requirements=[], inputs=[StepInput(id='ffmpeg_input', type='Directory', value=None, default=None, source='clamr/outdir', prefix='-i', position=2, @@ -362,7 +362,7 @@ def test_parse_workflow_missing_input(self): workflow_id=WORKFLOW_NOJOB_GOLD.id), Task( name='ffmpeg', - base_command='ffmpeg -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4', # noqa + base_command='ffmpeg -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4', # pylint: disable=C0301 hints=[], requirements=[], inputs=[StepInput(id='infile', type='File', value=None, default='graphics_output', diff --git a/beeflow/tests/test_scheduler_resource_allocation.py b/beeflow/tests/test_scheduler_resource_allocation.py index b2d22f21c..8fc3f6596 100644 --- a/beeflow/tests/test_scheduler_resource_allocation.py +++ b/beeflow/tests/test_scheduler_resource_allocation.py @@ -1,4 +1,8 @@ """Test the resource_allocation submodule of BEE.""" + +# Disable W0511: This is related to issue #333 +# pylint:disable=W0511 + import json from beeflow.scheduler import resource_allocation @@ -70,5 +74,3 @@ def test_requirements_encode_decode_json(): decoded = resource_allocation.Requirements.decode(json.loads(s)) assert decoded.max_runtime == requirements.max_runtime assert decoded.nodes == requirements.nodes -# Ignore W0511: This is related to issue #333 -# pylama:ignore=W0511 diff --git a/beeflow/tests/test_scheduler_rest.py b/beeflow/tests/test_scheduler_rest.py index fad54b560..76d0f9be4 100644 --- a/beeflow/tests/test_scheduler_rest.py +++ b/beeflow/tests/test_scheduler_rest.py @@ -2,6 +2,11 @@ Tests of the REST interface for BEE Scheduler. """ + +# Disable R1732: This suggestion about using `with` doesn't apply here. +# Disable W0621: These are fixtures; it's supposed to work this way. 
+# pylint:disable=R1732,W0621 + import os import tempfile import pytest @@ -214,8 +219,3 @@ def test_schedule_multi_job_two_resources(scheduler): assert len(data[2]['allocations']) > 0 # Ensure proper scheduled time assert data[2]['allocations'][0]['start_time'] < 6 - - -# Ignore R1732: This suggestion about using `with` doesn't apply here. -# Ignore W0621: These are fixtures; it's supposed to work this way. -# pylama:ignore=R1732,W0621 diff --git a/beeflow/tests/test_slurm_worker.py b/beeflow/tests/test_slurm_worker.py index 426c4b47d..45c655290 100644 --- a/beeflow/tests/test_slurm_worker.py +++ b/beeflow/tests/test_slurm_worker.py @@ -1,4 +1,11 @@ """Tests of the Slurm worker.""" + +# Disable R1732: This is not what we need to do with the Popen of slurmrestd above; +# using a with statement doesn't kill the process immediately but just +# waits for it to complete and slurmrestd never will unless we kill it. +# Disable W0621: Redefinition of names is required for pytest +# pylint:disable=R1732,W0621 + import uuid import shutil import time @@ -119,10 +126,3 @@ def test_no_slurmrestd(slurmrestd_worker_no_daemon): assert state == 'NOT_RESPONDING' assert worker.query_task(job_id) == 'NOT_RESPONDING' assert worker.cancel_task(job_id) == 'NOT_RESPONDING' -# Ignoring R1732: This is not what we need to do with the Popen of slurmrestd above; -# using a with statement doesn't kill the process immediately but just -# waits for it to complete and slurmrestd never will unless we kill it. -# Ignoring E402: "module level import not at top of file" - this is required for -# bee config -# Ignoring W0621: Redefinition of names is required for pytest -# pylama:ignore=R1732,E402,W0621 diff --git a/beeflow/tests/test_tm.py b/beeflow/tests/test_tm.py index 35491e99a..a18515bff 100644 --- a/beeflow/tests/test_tm.py +++ b/beeflow/tests/test_tm.py @@ -43,7 +43,7 @@ def temp_db(): @pytest.mark.usefixtures('flask_client', 'mocker') -def test_submit_task(flask_client, mocker, temp_db): # noqa +def test_submit_task(flask_client, mocker, temp_db): # pylint: disable=W0621 """Create a workflow and get the ID back.""" mocker.patch('beeflow.task_manager.utils.worker_interface', MockWorkerSubmission) @@ -78,7 +78,7 @@ def test_submit_task(flask_client, mocker, temp_db): # noqa @pytest.mark.usefixtures('flask_client', 'mocker') -def test_completed_task(flask_client, mocker, temp_db): # noqa +def test_completed_task(flask_client, mocker, temp_db): # pylint: disable=W0613,W0621 """Tests how the task manager processes a completed task.""" # 42 is the sample task ID mocker.patch('beeflow.task_manager.utils.worker_interface', @@ -94,7 +94,7 @@ def test_completed_task(flask_client, mocker, temp_db): # noqa @pytest.mark.usefixtures('flask_client', 'mocker') -def test_remove_task(flask_client, mocker, temp_db): # noqa +def test_remove_task(flask_client, mocker, temp_db): # pylint: disable=W0621 """Test cancelling a workflow and removing tasks.""" task1, task2, task3 = generate_tasks(3) # Add a few tasks diff --git a/beeflow/tests/test_wf_interface.py b/beeflow/tests/test_wf_interface.py index 84d9d53e5..038190935 100644 --- a/beeflow/tests/test_wf_interface.py +++ b/beeflow/tests/test_wf_interface.py @@ -1,6 +1,9 @@ #! 
/usr/bin/env python3 """Unit test module for the BEE workflow interface module.""" +# Disable W0212: Access required for unit tests +# pylint:disable=W0212 + import unittest from beeflow.common.wf_data import (Workflow, Task, Requirement, Hint, InputParameter, @@ -687,6 +690,3 @@ def _create_test_tasks(self, workflow_id): if __name__ == "__main__": unittest.main() -# Ignore W0212: Access required for unit tests -# Ignore E402: "module level import not at top of file" - this is required for bee config -# pylama:ignore=W0212,E402 diff --git a/beeflow/tests/test_wf_manager.py b/beeflow/tests/test_wf_manager.py index 08025e6d4..039266257 100644 --- a/beeflow/tests/test_wf_manager.py +++ b/beeflow/tests/test_wf_manager.py @@ -1,5 +1,7 @@ """Unit tests for the workflow manager.""" +# pylint:disable=W0621,W0613 + import tempfile import os import pathlib @@ -258,4 +260,3 @@ def test_resume_workflow(client, mocker, setup_teardown_workflow, temp_db): resp = client().patch(f'/bee_wfm/v1/jobs/{WF_ID}', json=request) assert resp.json['status'] == 'Workflow Resumed' assert resp.status_code == 200 -# pylama:ignore=W0621,W0613 diff --git a/beeflow/wf_manager/resources/wf_list.py b/beeflow/wf_manager/resources/wf_list.py index 48e491167..d8c1eda43 100644 --- a/beeflow/wf_manager/resources/wf_list.py +++ b/beeflow/wf_manager/resources/wf_list.py @@ -10,7 +10,7 @@ from flask import make_response, jsonify from werkzeug.datastructures import FileStorage from flask_restful import Resource, reqparse -from celery import shared_task # noqa (pylama can't find celery imports) +from celery import shared_task from beeflow.common import log as bee_logging # from beeflow.common.wf_profiler import WorkflowProfiler diff --git a/beeflow/wf_manager/resources/wf_update.py b/beeflow/wf_manager/resources/wf_update.py index 534e3e3c5..a31586c8f 100644 --- a/beeflow/wf_manager/resources/wf_update.py +++ b/beeflow/wf_manager/resources/wf_update.py @@ -83,7 +83,7 @@ def put(self): for state_update in state_updates: self.update_task_state(state_update, db) - return make_response(jsonify(status=('Tasks updated successfully')), 200) + return make_response(jsonify(status='Tasks updated successfully'), 200) def handle_metadata(self, state_update, task, wfi): """Handle metadata for a task update.""" diff --git a/beeflow/wf_manager/resources/wf_utils.py b/beeflow/wf_manager/resources/wf_utils.py index ced30eb87..156adf5a7 100644 --- a/beeflow/wf_manager/resources/wf_utils.py +++ b/beeflow/wf_manager/resources/wf_utils.py @@ -4,6 +4,7 @@ import shutil import requests import jsonpickle +from celery import shared_task # pylint: disable=W0611 # pylint can't find celery imports from beeflow.common import log as bee_logging from beeflow.common.config_driver import BeeConfig as bc @@ -16,7 +17,6 @@ from beeflow.common.db import wfm_db from beeflow.common.db.bdb import connect_db -from celery import shared_task #noqa (pylama can't find celery imports) log = bee_logging.setup(__name__) @@ -190,8 +190,7 @@ def _resource(component, tag=""): # Submit tasks to the TM -# pylama:ignore=W0613 -def submit_tasks_tm(wf_id, tasks, allocation): +def submit_tasks_tm(wf_id, tasks, allocation): # pylint: disable=W0613 """Submit a task to the task manager.""" wfi = get_workflow_interface(wf_id) for task in tasks: @@ -251,7 +250,7 @@ def submit_tasks_scheduler(tasks): def schedule_submit_tasks(wf_id, tasks): """Schedule and then submit tasks to the TM.""" # Submit ready tasks to the scheduler - allocation = submit_tasks_scheduler(tasks) #NOQA + allocation = 
submit_tasks_scheduler(tasks) # Submit tasks to TM submit_tasks_tm(wf_id, tasks, allocation) @@ -265,7 +264,7 @@ def connect_neo4j_driver(bolt_port): driver.create_bee_node() -def setup_workflow(wf_id, wf_name, wf_dir, wf_workdir, no_start, workflow=None, +def setup_workflow(wf_id, wf_name, wf_dir, wf_workdir, no_start, workflow=None, # pylint: disable=W0613 tasks=None): """Initialize Workflow in Separate Process.""" wfi = get_workflow_interface(wf_id) diff --git a/beeflow/wf_manager/wf_manager.py b/beeflow/wf_manager/wf_manager.py index d55e28c1d..dbc789e11 100644 --- a/beeflow/wf_manager/wf_manager.py +++ b/beeflow/wf_manager/wf_manager.py @@ -2,8 +2,8 @@ import os from flask import Flask -from celery import Celery # noqa (pylama can't find celery imports) -from celery import shared_task #noqa +from celery import Celery +from celery import shared_task # pylint: disable=W0611 # pylint can't find celery imports from beeflow.common.api import BeeApi from beeflow.common import paths from beeflow.wf_manager.resources.wf_list import WFList diff --git a/docs/poetry_tutorial/README.md b/docs/poetry_tutorial/README.md index f30d285f3..2d2d5f896 100644 --- a/docs/poetry_tutorial/README.md +++ b/docs/poetry_tutorial/README.md @@ -29,7 +29,7 @@ Add Python package dependencies (from PyPI) to `pyproject.toml`: `poetry add ...` -Add developer dependencies (e.g. `pylama`, `sphinx`): +Add developer dependencies (e.g. `pylint`, `sphinx`): `poetry add --dev ...` diff --git a/docs/sphinx/contribute.rst b/docs/sphinx/contribute.rst index 9dc2fa707..7f137fc8c 100644 --- a/docs/sphinx/contribute.rst +++ b/docs/sphinx/contribute.rst @@ -14,14 +14,21 @@ Upon release, develop will be merged by the team lead into main. Additionally, a Style Guide =========== -BEE is python code and adheres to style guidelines specified in **setup.cfg**. Before attempting to commit and push changes, please install our pre-commit githooks by running the following command in project root: +BEE is Python code and adheres to the style guidelines specified in **setup.cfg**, enforced using `pylint <https://pylint.readthedocs.io/>`_. Before attempting to commit and push changes, please set up the git hook scripts by running the following command in the project root: -If using `git --version` >= 2.9: - git config core.hooksPath .githooks +.. code-block:: -Otherwise: - cp .githooks/* .git/hooks/ + pre-commit install -Using these git hooks will ensure your contributions adhere to style guidelines required for contribution. You will need to repeat these steps for every `BEE` repo you clone. +Important Notes: +---------------- +* To use the git hooks, you must have your Poetry environment set up and activated, as the hooks rely on the environment to run the necessary checks. +* If you wish to skip running the git hook for a specific commit, you can do so with the following command: + +.. code-block:: + + SKIP=pylint git commit -m "foo" + +* Using these git hooks ensures your contributions adhere to the required style guidelines. You will need to repeat these steps for every **BEE** repo you clone. diff --git a/poetry.lock b/poetry.lock index c3a5937c0..a8dca1b70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -119,22 +119,17 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "astroid" -version = "2.13.5" +version = "3.2.4" description = "An abstract syntax tree for Python with inference support." optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "astroid-2.13.5-py3-none-any.whl", hash = "sha256:6891f444625b6edb2ac798829b689e95297e100ddf89dbed5a8c610e34901501"}, - {file = "astroid-2.13.5.tar.gz", hash = "sha256:df164d5ac811b9f44105a72b8f9d5edfb7b5b2d7e979b04ea377a77b3229114a"}, + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, ] [package.dependencies] -lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] [[package]] name = "async-timeout" @@ -439,6 +434,17 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.4.0" @@ -936,6 +942,17 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + [[package]] name = "docutils" version = "0.18.1" @@ -1359,6 +1376,20 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "identify" +version = "2.6.1" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.10" @@ -1616,52 +1647,6 @@ sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] -[[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = 
"lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] - [[package]] name = "lockfile" version = "0.12.2" @@ -2092,6 +2077,17 @@ doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx- extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + [[package]] name = "openstacksdk" version = "4.0.1" @@ -2285,6 +2281,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = 
"pre-commit" +version = "3.5.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "prettytable" version = "3.11.0" @@ -2411,7 +2425,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = true python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -2422,24 +2435,12 @@ description = "A collection of ASN.1-based protocols modules" optional = true python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] pyasn1 = ">=0.4.6,<0.7.0" -[[package]] -name = "pycodestyle" -version = "2.12.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, - {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, -] - [[package]] name = "pycparser" version = "2.22" @@ -2583,23 +2584,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pydocstyle" -version = "6.1.1" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, -] - -[package.dependencies] -snowballstemmer = "*" - -[package.extras] -toml = ["toml"] - [[package]] name = "pydot" version = "3.0.2" @@ -2619,17 +2603,6 @@ dev = ["chardet", "parameterized", "ruff"] release = ["zest.releaser[recommended]"] tests = ["chardet", "parameterized", "pytest", "pytest-cov", "pytest-xdist[psutil]", "ruff", "tox"] -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pygments" version = "2.18.0" @@ -2644,52 +2617,26 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] -[[package]] -name = "pylama" -version = "8.4.1" -description = "Code audit tool for python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pylama-8.4.1-py3-none-any.whl", hash = 
"sha256:5bbdbf5b620aba7206d688ed9fc917ecd3d73e15ec1a89647037a09fa3a86e60"}, - {file = "pylama-8.4.1.tar.gz", hash = "sha256:2d4f7aecfb5b7466216d48610c7d6bad1c3990c29cdd392ad08259b161e486f6"}, -] - -[package.dependencies] -mccabe = ">=0.7.0" -pycodestyle = ">=2.9.1" -pydocstyle = ">=6.1.1" -pyflakes = ">=2.5.0" - -[package.extras] -all = ["eradicate", "mypy", "pylint", "radon", "vulture"] -eradicate = ["eradicate"] -mypy = ["mypy"] -pylint = ["pylint"] -radon = ["radon"] -tests = ["eradicate (>=2.0.0)", "mypy", "pylama-quotes", "pylint (>=2.11.1)", "pytest (>=7.1.2)", "pytest-mypy", "radon (>=5.1.0)", "toml", "types-setuptools", "types-toml", "vulture"] -toml = ["toml (>=0.10.2)"] -vulture = ["vulture"] - [[package]] name = "pylint" -version = "2.15.9" +version = "3.2.7" description = "python code static checker" optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "pylint-2.15.9-py3-none-any.whl", hash = "sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb"}, - {file = "pylint-2.15.9.tar.gz", hash = "sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4"}, + {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, + {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, ] [package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" +astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] -isort = ">=4.2.5,<6" +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} @@ -3776,6 +3723,26 @@ files = [ {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, ] +[[package]] +name = "virtualenv" +version = "20.29.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.29.0-py3-none-any.whl", hash = "sha256:c12311863497992dc4b8644f8ea82d3b35bb7ef8ee82e6630d76d0197c39baf9"}, + {file = "virtualenv-20.29.0.tar.gz", hash = "sha256:6345e1ff19d4b1296954cee076baaf58ff2a12a84a338c62b02eda39f20aa982"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wcwidth" version = "0.2.13" @@ -3808,7 +3775,7 @@ watchdog = ["watchdog (>=2.3)"] name = "wrapt" version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." 
-optional = false +optional = true python-versions = ">=3.8" files = [ {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, @@ -3903,4 +3870,4 @@ cloud-extras = ["google-api-python-client", "python-heatclient", "python-opensta [metadata] lock-version = "2.0" python-versions = ">=3.8.3,<=3.13.0" -content-hash = "7b02ce5e0961182a48388732756059eb7cc2ff9039154aa3312c8b11e385ebe7" +content-hash = "5dc86a11a61981f58748d101787087085df2bf4b3f5e6e8a638943c4e2766678" diff --git a/pyproject.toml b/pyproject.toml index d6ab82d7f..ed5c8d477 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,18 +81,15 @@ python-heatclient = { version = "^3.1.0", optional = true } graphviz = "^0.20.3" networkx = "3.1" requests-unixsocket2 = "^0.4.2" +pylint = "3.2.7" +pre-commit = "3.5.0" [tool.poetry.extras] cloud_extras = ["google-api-python-client", "python-openstackclient", "python-heatclient"] [tool.poetry.dev-dependencies] # Developer dependencies -pycodestyle = { version = ">=2.5.0" } -# Newer version of pydocstyle break pylama -pydocstyle = "6.1.1" -pyflakes = "3.0.1" -pylama = "8.4.1" -pylint = "2.15.9" +pylint = "3.2.7" pytest = "7.2.0" pytest-mock = "3.3.1" pytest-cov = "5.0.0" diff --git a/setup.cfg b/setup.cfg index f30ca8daf..f4ecf1448 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,7 @@ -[pylama] -format = pylint -linters = pycodestyle,pydocstyle,pyflakes,pylint -ignore = R0902,R0903,R0904,R0912,R0913,R0914,R0916,W0603,W1203,C0413,E0401 +[pylint] +max-line-length = 99 +good-names = i,j,k,v,t,m,s,n,x,y,ax,ex,fp,id,tx,fn,pi,wf,db,Run,_ +disable = R0902,R0903,R0904,R0912,R0913,R0914,R0916,W0603,W1203,C0413,E0401 # R0902: too many instance attributes (default: 8) # R0903: class has too few public methods (default: 2) # R0904: class has too many public methods (default: 20) @@ -12,20 +12,3 @@ ignore = R0902,R0903,R0904,R0912,R0913,R0914,R0916,W0603,W1203,C0413,E0401 # W0603: global statement used to update a global variable # W1203: use lazy % formatting in logging functions # C0413: requires imports at the top of the file - -[pylama:pylint] -max_line_length = 99 -good_names = i,j,k,v,t,m,s,n,x,y,ax,ex,fp,id,tx,fn,pi,wf,db,Run,_ - -[pylama:pycodestyle] -max_line_length = 99 -ignore = E402,W503 -# E402: module level imports should be at the top of the file -# W503: This warns about a line break before a binary operator. I've noticed this -# conflicting with the line length warning several times, especially with -# code using list comprehensions. -# E0401: ignoring imports since beeflow may not be installed when linting - -[pylama:pydocstyle] -max_line_length = 99 -ignore = D203,D213,D404
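
A note on pragma scoping, since it drives where the suppressions land in this diff: pylama's `# pylama:ignore=...` modelines suppressed checks for the whole file regardless of where they appeared (hence the old file-footer placement), while the pylint pragmas that replace them are position-sensitive. That is why the footers move to bare `# pylint:disable=...` comments at the top of each module, with trailing comments reserved for single offending lines. A minimal illustrative module (not part of the PR; the names here are made up for the example):

"""Example module: scoping of pylint disable pragmas."""

# A bare pragma at module level applies from this line to the end of the file.
# pylint: disable=W0511

# TODO: silenced by the module-level disable above (W0511, "fixme").


class Counter:
    """Tiny class used to demonstrate a line-scoped disable."""

    def __init__(self):
        self._count = 0


def peek(counter):
    """Read private state; the trailing pragma silences only this one line."""
    return counter._count  # pylint: disable=W0212  # protected-access, here only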
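
The `[pylint]` section that replaces the pylama tables in setup.cfg becomes the single source of lint configuration, which tools point at with `--rcfile=setup.cfg`. As a rough sketch of how an equivalent run could be driven from Python — not something this PR adds — pylint's public `Run` entry point accepts the same command-line flags (`-rn`/`-sn` turn off the full report and the score); reading counters off the resulting `LinterStats` object assumes pylint >= 2.12 behaviour, which holds for the 3.2.7 pin:

"""Sketch: drive the repository's pylint configuration programmatically."""

from pylint.lint import Run

# Messages only (-rn), no score (-sn); every other setting comes from the
# [pylint] section of setup.cfg. Assumes this runs from the repo root.
run = Run(
    ["-rn", "-sn", "--rcfile=setup.cfg", "beeflow/"],
    exit=False,  # keep the interpreter alive so the results can be inspected
)

# pylint >= 2.12 exposes per-category counters on a LinterStats object.
stats = run.linter.stats
print(f"fatal={stats.fatal} errors={stats.error} warnings={stats.warning}")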