Skip to content

Commit fa7f2e6

Browse files
chore: bump ruff to 0.11.5, apply auto-fixes (#478)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: Aaron <AJ> Steers <[email protected]>
1 parent 836d587 commit fa7f2e6

29 files changed

+141
-142
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ repos:
3636
- id: check-toml
3737

3838
- repo: https://github.com/astral-sh/ruff-pre-commit
39-
rev: v0.8.3
39+
rev: v0.11.5
4040
hooks:
4141
# Run the linter with repo-defined settings
4242
- id: ruff

airbyte_cdk/connector_builder/main.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -78,9 +78,9 @@ def handle_connector_builder_request(
7878
if command == "resolve_manifest":
7979
return resolve_manifest(source)
8080
elif command == "test_read":
81-
assert (
82-
catalog is not None
83-
), "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
81+
assert catalog is not None, (
82+
"`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
83+
)
8484
return read_stream(source, config, catalog, state, limits)
8585
elif command == "full_resolve_manifest":
8686
return full_resolve_manifest(source, limits)

airbyte_cdk/sources/concurrent_source/concurrent_source.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,9 @@ def create(
4949
too_many_generator = (
5050
not is_single_threaded and initial_number_of_partitions_to_generate >= num_workers
5151
)
52-
assert (
53-
not too_many_generator
54-
), "It is required to have more workers than threads generating partitions"
52+
assert not too_many_generator, (
53+
"It is required to have more workers than threads generating partitions"
54+
)
5555
threadpool = ThreadPoolManager(
5656
concurrent.futures.ThreadPoolExecutor(
5757
max_workers=num_workers, thread_name_prefix="workerpool"

airbyte_cdk/sources/file_based/file_based_source.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -282,9 +282,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
282282
and hasattr(self, "_concurrency_level")
283283
and self._concurrency_level is not None
284284
):
285-
assert (
286-
state_manager is not None
287-
), "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
285+
assert state_manager is not None, (
286+
"No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
287+
)
288288

289289
cursor = self.cursor_cls(
290290
stream_config,

airbyte_cdk/sources/file_based/file_types/avro_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ def _convert_avro_type_to_json(
154154
# For example: ^-?\d{1,5}(?:\.\d{1,3})?$ would accept 12345.123 and 123456.12345 would be rejected
155155
return {
156156
"type": "string",
157-
"pattern": f"^-?\\d{{{1,max_whole_number_range}}}(?:\\.\\d{1,decimal_range})?$",
157+
"pattern": f"^-?\\d{{{1, max_whole_number_range}}}(?:\\.\\d{1, decimal_range})?$",
158158
}
159159
elif "logicalType" in avro_field:
160160
if avro_field["logicalType"] not in AVRO_LOGICAL_TYPE_TO_JSON:

airbyte_cdk/sources/file_based/stream/concurrent/adapters.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -284,9 +284,9 @@ def read(self) -> Iterable[Record]:
284284
def to_slice(self) -> Optional[Mapping[str, Any]]:
285285
if self._slice is None:
286286
return None
287-
assert (
288-
len(self._slice["files"]) == 1
289-
), f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
287+
assert len(self._slice["files"]) == 1, (
288+
f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
289+
)
290290
file = self._slice["files"][0]
291291
return {"files": [file]}
292292

airbyte_cdk/sql/shared/sql_processor.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -326,9 +326,9 @@ def _ensure_schema_exists(
326326

327327
if DEBUG_MODE:
328328
found_schemas = schemas_list
329-
assert (
330-
schema_name in found_schemas
331-
), f"Schema {schema_name} was not created. Found: {found_schemas}"
329+
assert schema_name in found_schemas, (
330+
f"Schema {schema_name} was not created. Found: {found_schemas}"
331+
)
332332

333333
def _quote_identifier(self, identifier: str) -> str:
334334
"""Return the given identifier, quoted."""
@@ -617,10 +617,10 @@ def _append_temp_table_to_final_table(
617617
self._execute_sql(
618618
f"""
619619
INSERT INTO {self._fully_qualified(final_table_name)} (
620-
{f',{nl} '.join(columns)}
620+
{f",{nl} ".join(columns)}
621621
)
622622
SELECT
623-
{f',{nl} '.join(columns)}
623+
{f",{nl} ".join(columns)}
624624
FROM {self._fully_qualified(temp_table_name)}
625625
""",
626626
)
@@ -645,8 +645,7 @@ def _swap_temp_table_with_final_table(
645645
deletion_name = f"{final_table_name}_deleteme"
646646
commands = "\n".join(
647647
[
648-
f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME "
649-
f"TO {deletion_name};",
648+
f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME TO {deletion_name};",
650649
f"ALTER TABLE {self._fully_qualified(temp_table_name)} RENAME "
651650
f"TO {final_table_name};",
652651
f"DROP TABLE {self._fully_qualified(deletion_name)};",
@@ -686,10 +685,10 @@ def _merge_temp_table_to_final_table(
686685
{set_clause}
687686
WHEN NOT MATCHED THEN INSERT
688687
(
689-
{f',{nl} '.join(columns)}
688+
{f",{nl} ".join(columns)}
690689
)
691690
VALUES (
692-
tmp.{f',{nl} tmp.'.join(columns)}
691+
tmp.{f",{nl} tmp.".join(columns)}
693692
);
694693
""",
695694
)

poetry.lock

Lines changed: 20 additions & 20 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ whenever = "^0.6.16"
8888
freezegun = "*"
8989
mypy = "*"
9090
asyncio = "3.4.3"
91-
ruff = "^0.7.2"
91+
ruff = "^0.11.5"
9292
pdoc = "^15.0.0"
9393
poethepoet = "^0.24.2"
9494
pyproject-flake8 = "^6.1.0"

unit_tests/connector_builder/test_connector_builder_handler.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,7 @@
124124
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
125125
"cursor_field": "item_id",
126126
},
127-
"" "requester": {
127+
"requester": {
128128
"path": "/v3/marketing/lists",
129129
"authenticator": {
130130
"type": "BearerAuthenticator",
@@ -175,7 +175,7 @@
175175
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
176176
"cursor_field": "item_id",
177177
},
178-
"" "requester": {
178+
"requester": {
179179
"path": "/v3/marketing/lists",
180180
"authenticator": {
181181
"type": "BearerAuthenticator",
@@ -348,7 +348,7 @@
348348
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
349349
"cursor_field": "item_id",
350350
},
351-
"" "requester": {
351+
"requester": {
352352
"path": "/v3/marketing/lists",
353353
"authenticator": {"type": "OAuthAuthenticator", "api_token": "{{ config.apikey }}"},
354354
"request_parameters": {"a_param": "10"},
@@ -1221,9 +1221,9 @@ def test_handle_read_external_requests(deployment_mode, url_base, expected_error
12211221
source, config, catalog, _A_PER_PARTITION_STATE, limits
12221222
).record.data
12231223
if expected_error:
1224-
assert (
1225-
len(output_data["logs"]) > 0
1226-
), "Expected at least one log message with the expected error"
1224+
assert len(output_data["logs"]) > 0, (
1225+
"Expected at least one log message with the expected error"
1226+
)
12271227
error_message = output_data["logs"][0]
12281228
assert error_message["level"] == "ERROR"
12291229
assert expected_error in error_message["stacktrace"]
@@ -1317,9 +1317,9 @@ def test_handle_read_external_oauth_request(deployment_mode, token_url, expected
13171317
source, config, catalog, _A_PER_PARTITION_STATE, limits
13181318
).record.data
13191319
if expected_error:
1320-
assert (
1321-
len(output_data["logs"]) > 0
1322-
), "Expected at least one log message with the expected error"
1320+
assert len(output_data["logs"]) > 0, (
1321+
"Expected at least one log message with the expected error"
1322+
)
13231323
error_message = output_data["logs"][0]
13241324
assert error_message["level"] == "ERROR"
13251325
assert expected_error in error_message["stacktrace"]

unit_tests/destinations/test_destination.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,9 @@ def test_successful_parse(
5858
self, arg_list: List[str], expected_output: Mapping[str, Any], destination: Destination
5959
):
6060
parsed_args = vars(destination.parse_args(arg_list))
61-
assert (
62-
parsed_args == expected_output
63-
), f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
61+
assert parsed_args == expected_output, (
62+
f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
63+
)
6464

6565
@pytest.mark.parametrize(
6666
("arg_list"),

unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -100,9 +100,9 @@ def get_py_components_config_dict(
100100

101101
manifest_dict = yaml.safe_load(manifest_yml_path.read_text())
102102
assert manifest_dict, "Failed to load the manifest file."
103-
assert isinstance(
104-
manifest_dict, Mapping
105-
), f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
103+
assert isinstance(manifest_dict, Mapping), (
104+
f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
105+
)
106106

107107
custom_py_code = custom_py_code_path.read_text()
108108
combined_config_dict = {

unit_tests/sources/declarative/auth/test_session_token_auth.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ def test_get_new_session_token(requests_mock):
189189
)
190190

191191
session_token = get_new_session_token(
192-
f'{config["instance_api_url"]}session',
192+
f"{config['instance_api_url']}session",
193193
config["username"],
194194
config["password"],
195195
config["session_token_response_key"],

unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py

Lines changed: 13 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -363,9 +363,9 @@ def run_mocked_test(
363363
request_count = len(
364364
[req for req in m.request_history if unquote(req.url) == unquote(url)]
365365
)
366-
assert (
367-
request_count == 1
368-
), f"URL {url} was called {request_count} times, expected exactly once."
366+
assert request_count == 1, (
367+
f"URL {url} was called {request_count} times, expected exactly once."
368+
)
369369

370370

371371
def _run_read(
@@ -855,10 +855,11 @@ def run_incremental_parent_state_test(
855855
expected_records_set = list(
856856
{orjson.dumps(record): record for record in expected_records}.values()
857857
)
858-
assert (
859-
sorted(cumulative_records_state_deduped, key=lambda x: x["id"])
860-
== sorted(expected_records_set, key=lambda x: x["id"])
861-
), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
858+
assert sorted(cumulative_records_state_deduped, key=lambda x: x["id"]) == sorted(
859+
expected_records_set, key=lambda x: x["id"]
860+
), (
861+
f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
862+
)
862863

863864
# Store the final state after each intermediate read
864865
final_state_intermediate = [
@@ -869,9 +870,9 @@ def run_incremental_parent_state_test(
869870

870871
# Assert that the final state matches the expected state for all runs
871872
for i, final_state in enumerate(final_states):
872-
assert (
873-
final_state in expected_states
874-
), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
873+
assert final_state in expected_states, (
874+
f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
875+
)
875876

876877

877878
@pytest.mark.parametrize(
@@ -1300,8 +1301,7 @@ def test_incremental_parent_state(
13001301
{"id": 11, "post_id": 1, "updated_at": COMMENT_11_UPDATED_AT},
13011302
],
13021303
"next_page": (
1303-
"https://api.example.com/community/posts/1/comments"
1304-
"?per_page=100&page=2"
1304+
"https://api.example.com/community/posts/1/comments?per_page=100&page=2"
13051305
),
13061306
},
13071307
),
@@ -1346,8 +1346,7 @@ def test_incremental_parent_state(
13461346
{
13471347
"comments": [{"id": 20, "post_id": 2, "updated_at": COMMENT_20_UPDATED_AT}],
13481348
"next_page": (
1349-
"https://api.example.com/community/posts/2/comments"
1350-
"?per_page=100&page=2"
1349+
"https://api.example.com/community/posts/2/comments?per_page=100&page=2"
13511350
),
13521351
},
13531352
),

unit_tests/sources/declarative/parsers/test_model_to_component_factory.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3413,9 +3413,9 @@ def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]:
34133413
stream_state_migrations=[DummyStateMigration()],
34143414
)
34153415
assert cursor.state["lookback_window"] != 10, "State migration wasn't called"
3416-
assert (
3417-
cursor.state["lookback_window"] == 20
3418-
), "State migration was called, but actual state don't match expected"
3416+
assert cursor.state["lookback_window"] == 20, (
3417+
"State migration was called, but actual state don't match expected"
3418+
)
34193419

34203420

34213421
def test_create_concurrent_cursor_uses_min_max_datetime_format_if_defined():

0 commit comments

Comments (0)