Skip to content

Commit 186a2b9

Browse files
authored
Merge pull request #6 from RedHatQE/main
[pull] main from RedHatQE:main
2 parents 0d95262 + 6dde694 commit 186a2b9

File tree

299 files changed

+2750
-1289
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

299 files changed

+2750
-1289
lines changed

.pre-commit-config.yaml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ ci:
1010

1111
repos:
1212
- repo: https://github.com/pre-commit/pre-commit-hooks
13-
rev: "v6.0.0"
13+
rev: v6.0.0
1414
hooks:
1515
- id: check-merge-conflict
1616
- id: debug-statements
@@ -23,7 +23,7 @@ repos:
2323
- id: check-toml
2424

2525
- repo: https://github.com/PyCQA/flake8
26-
rev: "7.3.0"
26+
rev: 7.3.0
2727
hooks:
2828
- id: flake8
2929
args: [--config=.flake8]
@@ -45,18 +45,18 @@ repos:
4545
]
4646

4747
- repo: https://github.com/astral-sh/ruff-pre-commit
48-
rev: v0.13.0
48+
rev: v0.14.5
4949
hooks:
5050
- id: ruff
5151
- id: ruff-format
5252

5353
- repo: https://github.com/gitleaks/gitleaks
54-
rev: v8.28.0
54+
rev: v8.29.0
5555
hooks:
5656
- id: gitleaks
5757

5858
- repo: https://github.com/pre-commit/mirrors-mypy
59-
rev: v1.18.1
59+
rev: v1.18.2
6060
hooks:
6161
- id: mypy
6262
exclude: ^(tests/|examples/|docs/)

VERSION

Lines changed: 0 additions & 1 deletion
This file was deleted.

class_generator/cli.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
"""Command-line interface for the class generator."""
22

33
import fnmatch
4+
import logging
5+
import os
46
import shutil
57
import sys
68
from datetime import datetime
79
from pathlib import Path
810
from typing import Any
911

10-
import logging
11-
1212
import cloup
1313
from cloup.constraints import If, IsSet, accept_none, require_one
1414
from simple_logger.logger import get_logger
@@ -17,7 +17,7 @@
1717
from class_generator.core.coverage import analyze_coverage, generate_report
1818
from class_generator.core.discovery import discover_generated_resources
1919
from class_generator.core.generator import class_generator
20-
from class_generator.core.schema import update_kind_schema, ClusterVersionError
20+
from class_generator.core.schema import ClusterVersionError, update_kind_schema
2121
from class_generator.tests.test_generation import generate_class_generator_tests
2222
from class_generator.utils import execute_parallel_tasks
2323
from ocp_resources.utils.utils import convert_camel_case_to_snake_case
@@ -57,7 +57,7 @@ def handle_schema_update(update_schema: bool, generate_missing: bool) -> bool:
5757
LOGGER.info("Updating resource schema...")
5858
try:
5959
update_kind_schema()
60-
except (RuntimeError, IOError, ClusterVersionError) as e:
60+
except (OSError, RuntimeError, ClusterVersionError) as e:
6161
LOGGER.exception(f"Failed to update schema: {e}")
6262
sys.exit(1)
6363

@@ -251,9 +251,9 @@ def regenerate_single_resource(resource: dict[str, Any]) -> tuple[str, bool, str
251251
return resource_kind, False, str(e)
252252

253253
# Process results from parallel execution
254-
def process_regeneration_result(resource: dict[str, Any], result: tuple[str, bool, str | None]) -> None:
254+
def process_regeneration_result(_resource: dict[str, Any], result: tuple[str, bool, str | None]) -> None:
255255
nonlocal success_count, error_count
256-
resource_kind, success, error = result
256+
_resource_kind, success, _error = result
257257
if success:
258258
success_count += 1
259259
else:
@@ -391,7 +391,7 @@ def generate_with_backup(kind_to_generate: str) -> tuple[str, bool, str | None]:
391391
return kind_to_generate, False, str(e)
392392

393393
# Process results from parallel execution
394-
def process_generation_result(kind_to_generate: str, result: tuple[str, bool, str | None]) -> None:
394+
def process_generation_result(_kind_to_generate: str, result: tuple[str, bool, str | None]) -> None:
395395
nonlocal success_count, error_count, failed_kinds
396396
kind_name, success, error = result
397397
if success:
@@ -447,8 +447,6 @@ def handle_test_generation(add_tests: bool) -> None:
447447

448448
# Run the generated test file
449449
LOGGER.info("Running generated tests...")
450-
import os
451-
452450
test_file = "class_generator/tests/test_class_generator.py"
453451
exit_code = os.system(f"uv run pytest {test_file}")
454452

class_generator/core/coverage.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def analyze_coverage(
6868
continue
6969

7070
try:
71-
with open(filepath, "r") as f:
71+
with open(filepath) as f:
7272
content = f.read()
7373

7474
# Check if file is auto-generated

class_generator/core/discovery.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
"""Discovery functions for finding cluster resources and generated files."""
22

3+
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
34
from pathlib import Path
45
from typing import Any
5-
from concurrent.futures import ThreadPoolExecutor, as_completed, Future
66

77
from kubernetes.dynamic import DynamicClient
88
from simple_logger.logger import get_logger
@@ -221,7 +221,7 @@ def discover_generated_resources() -> list[dict[str, Any]]:
221221
for info in resource_infos:
222222
# Read file to check for user code
223223
try:
224-
with open(info.file_path, "r", encoding="utf-8") as f:
224+
with open(info.file_path, encoding="utf-8") as f:
225225
content = f.read()
226226
except FileNotFoundError:
227227
LOGGER.warning(f"File not found: {info.file_path}, skipping...")

class_generator/core/generator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def class_generator(
155155
LOGGER.info("Updating schema")
156156
try:
157157
update_kind_schema()
158-
except (RuntimeError, IOError) as e:
158+
except (OSError, RuntimeError) as e:
159159
error_msg = f"Failed to update schema: {e}"
160160
LOGGER.error(error_msg)
161161
raise RuntimeError(error_msg) from e

class_generator/core/schema.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from simple_logger.logger import get_logger
1313

1414
from class_generator.constants import DEFINITIONS_FILE, RESOURCES_MAPPING_FILE, SCHEMA_DIR
15-
from class_generator.utils import execute_parallel_with_mapping, execute_parallel_tasks
15+
from class_generator.utils import execute_parallel_tasks, execute_parallel_with_mapping
1616
from ocp_resources.utils.archive_utils import save_json_archive
1717
from ocp_resources.utils.schema_validator import SchemaValidator
1818

@@ -136,9 +136,9 @@ def check_and_update_cluster_version(client: str) -> bool:
136136
last_cluster_version_generated: str = ""
137137

138138
try:
139-
with open(cluster_version_file, "r") as fd:
139+
with open(cluster_version_file) as fd:
140140
last_cluster_version_generated = fd.read().strip()
141-
except (FileNotFoundError, IOError):
141+
except (OSError, FileNotFoundError):
142142
# Treat missing file as first run - use baseline version that allows updates
143143
last_cluster_version_generated = "v0.0.0"
144144
LOGGER.info("Cluster version file not found - treating as first run with baseline version v0.0.0")
@@ -530,7 +530,7 @@ def process_schema_definitions(
530530
definitions = {}
531531
if not allow_updates:
532532
try:
533-
with open(DEFINITIONS_FILE, "r") as f:
533+
with open(DEFINITIONS_FILE) as f:
534534
existing_definitions_data = json.load(f)
535535
definitions = existing_definitions_data.get("definitions", {})
536536
LOGGER.info(f"Loaded {len(definitions)} existing definitions to preserve")
@@ -1064,12 +1064,12 @@ def process_explain_result(spec_tuple: tuple[str, str], result: Any) -> None:
10641064
LOGGER.debug(f"Failed to obtain explain data for {ref_name} from {explain_path}")
10651065

10661066
def handle_explain_error(spec_tuple: tuple[str, str], exc: Exception) -> None:
1067-
ref_name, explain_path = spec_tuple
1068-
LOGGER.debug(f"Exception occurred while explaining {ref_name} from {explain_path}: {exc}")
1067+
ref_name, _explain_path = spec_tuple
1068+
LOGGER.debug(f"Exception occurred while explaining {ref_name}: {exc}")
10691069
explain_results[ref_name] = None
10701070

10711071
def create_explain_task(spec_tuple: tuple[str, str]) -> Any:
1072-
ref_name, explain_path = spec_tuple
1072+
_ref_name, explain_path = spec_tuple
10731073
return _run_explain_and_parse(client, explain_path)
10741074

10751075
execute_parallel_tasks(
@@ -1244,7 +1244,7 @@ def process_required_field_result(task_tuple: tuple[str, str], result: Any) -> N
12441244
definitions[schema_key] = updated_schema
12451245

12461246
def handle_required_field_error(task_tuple: tuple[str, str], exc: Exception) -> None:
1247-
schema_key, explain_path = task_tuple
1247+
schema_key, _explain_path = task_tuple
12481248
LOGGER.debug(f"Failed to process required fields for {schema_key}: {exc}")
12491249
# Set empty list if explain fails
12501250
current_schema = definitions[schema_key]
@@ -1307,14 +1307,14 @@ def _get_missing_core_definitions(
13071307
if refs_to_fetch:
13081308

13091309
def process_missing_definition_result(task_tuple: tuple[str, str], result: Any) -> None:
1310-
ref_name, oc_path = task_tuple
1310+
_ref_name, _oc_path = task_tuple
13111311
if result:
13121312
fetched_ref_name, schema = result
13131313
missing_definitions[fetched_ref_name] = schema
13141314
LOGGER.debug(f"Successfully fetched definition for {fetched_ref_name}")
13151315

13161316
def handle_missing_definition_error(task_tuple: tuple[str, str], exc: Exception) -> None:
1317-
ref_name, oc_path = task_tuple
1317+
ref_name, _oc_path = task_tuple
13181318
LOGGER.debug(f"Failed to fetch definition for {ref_name}: {exc}")
13191319

13201320
def create_missing_definition_task(task_tuple: tuple[str, str]) -> Any:
@@ -1360,10 +1360,10 @@ def write_schema_files(
13601360
# Ensure schema directory exists
13611361
try:
13621362
Path(SCHEMA_DIR).mkdir(parents=True, exist_ok=True)
1363-
except (OSError, IOError) as e:
1363+
except OSError as e:
13641364
error_msg = f"Failed to create schema directory {SCHEMA_DIR}: {e}"
13651365
LOGGER.error(error_msg)
1366-
raise IOError(error_msg) from e
1366+
raise OSError(error_msg) from e
13671367

13681368
# Fetch missing core definitions if schemas are available
13691369
if schemas:
@@ -1400,18 +1400,18 @@ def write_schema_files(
14001400
with open(definitions_file, "w") as fd:
14011401
json.dump(definitions_data, fd, indent=2, sort_keys=True)
14021402
LOGGER.info(f"Written {len(definitions)} definitions to {definitions_file}")
1403-
except (OSError, IOError, TypeError) as e:
1403+
except (OSError, TypeError) as e:
14041404
error_msg = f"Failed to write definitions file {definitions_file}: {e}"
14051405
LOGGER.error(error_msg)
1406-
raise IOError(error_msg) from e
1406+
raise OSError(error_msg) from e
14071407

14081408
# Write and archive resources mapping
14091409
try:
14101410
save_json_archive(resources_mapping, RESOURCES_MAPPING_FILE)
1411-
except (OSError, IOError, TypeError) as e:
1411+
except (OSError, TypeError) as e:
14121412
error_msg = f"Failed to save and archive resources mapping file {RESOURCES_MAPPING_FILE}: {e}"
14131413
LOGGER.error(error_msg)
1414-
raise IOError(error_msg) from e
1414+
raise OSError(error_msg) from e
14151415

14161416

14171417
@dataclasses.dataclass
@@ -1523,11 +1523,11 @@ def _handle_no_schemas_case() -> None:
15231523
"""
15241524
LOGGER.info("No schemas fetched. Preserving existing data to avoid overwriting with empty definitions.")
15251525
try:
1526-
with open(DEFINITIONS_FILE, "r") as fd:
1526+
with open(DEFINITIONS_FILE) as fd:
15271527
existing_definitions_data = json.load(fd)
15281528
definitions = existing_definitions_data.get("definitions", {})
15291529
LOGGER.info(f"Found {len(definitions)} existing definitions that will be preserved")
1530-
except (FileNotFoundError, IOError, json.JSONDecodeError):
1530+
except (OSError, FileNotFoundError, json.JSONDecodeError):
15311531
LOGGER.debug("Could not load existing definitions file. No existing definitions to preserve.")
15321532

15331533

class_generator/formatters/template_renderer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""Jinja template rendering for resource generation."""
22

3-
from typing import Any
43
from pathlib import Path
4+
from typing import Any
55

66
from jinja2 import DebugUndefined, Environment, FileSystemLoader, meta
77
from simple_logger.logger import get_logger
@@ -38,7 +38,7 @@ def render_jinja_template(template_dict: dict[Any, Any], template_dir: str, temp
3838
except AttributeError:
3939
# Fallback: read the template file directly
4040
template_path = Path(template_dir) / template_name
41-
with open(template_path, "r", encoding="utf-8") as f:
41+
with open(template_path, encoding="utf-8") as f:
4242
template_source = f.read()
4343

4444
ast = env.parse(source=template_source)

class_generator/parsers/explain_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def parse_explain(kind: str) -> list[dict[str, Any]]:
5555

5656
# For each API group, select the latest version
5757
filtered_schemas = []
58-
for group, group_schemas in schemas_by_group.items():
58+
for _group, group_schemas in schemas_by_group.items():
5959
if len(group_schemas) > 1:
6060
# Multiple versions in same group - pick latest
6161
versions = []

class_generator/parsers/type_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66

77
from simple_logger.logger import get_logger
88

9-
from class_generator.constants import MISSING_DESCRIPTION_STR, SPEC_STR, DEFINITIONS_FILE
9+
from class_generator.constants import DEFINITIONS_FILE, MISSING_DESCRIPTION_STR, SPEC_STR
1010
from class_generator.utils import sanitize_python_name
1111
from ocp_resources.utils.utils import convert_camel_case_to_snake_case
1212

Comments (0)