Commit 0b87629

Seeds are now handled in evaluator

1 parent 6be78ee · commit 0b87629

File tree

4 files changed: +11 −13 lines changed

sqlmesh/core/engine_adapter/databricks.py
sqlmesh/core/scheduler.py
tests/core/engine_adapter/integration/test_integration.py
tests/core/test_snapshot_evaluator.py

sqlmesh/core/engine_adapter/databricks.py

Lines changed: 2 additions & 2 deletions
@@ -403,8 +403,8 @@ def _record_execution_stats(
         if history_df is not None and not history_df.empty:
             write_df = history_df[history_df["operation"] == "WRITE"]
             write_df = write_df[write_df["timestamp"] == write_df["timestamp"].max()]
-            if not write_df.empty:
-                metrics = write_df["operationMetrics"][0]
+            if not write_df.empty and "operationMetrics" in write_df.columns:
+                metrics = write_df["operationMetrics"].iloc[0]
                 if metrics:
                     rowcount = None
                     rowcount_str = [
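
Both edits in this hunk are defensive: the added column-membership check guards against history results that lack an "operationMetrics" column, and .iloc[0] selects the first surviving row by position rather than by index label. A minimal, self-contained sketch of the pitfall the .iloc change avoids; the DataFrame below is illustrative stand-in data, not real Databricks table history:

import pandas as pd

# Illustrative stand-in for a table-history result (hypothetical values).
history_df = pd.DataFrame(
    {
        "operation": ["CREATE TABLE", "WRITE"],
        "timestamp": [1, 2],
        "operationMetrics": [{}, {"numOutputRows": "7"}],
    }
)

# Keep only WRITE operations; the surviving row keeps its original index label (1).
write_df = history_df[history_df["operation"] == "WRITE"]

# Label-based lookup: no row is labelled 0 any more, so this would raise KeyError.
# write_df["operationMetrics"][0]

# Positional lookup: always returns the first remaining row.
metrics = write_df["operationMetrics"].iloc[0]
print(metrics)  # {'numOutputRows': '7'}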

sqlmesh/core/scheduler.py

Lines changed: 8 additions & 8 deletions (whitespace-only changes)

@@ -540,21 +540,21 @@ def run_node(node: SchedulingUnit) -> None:
                 )

                 self.console.update_snapshot_evaluation_progress(
-                    snapshot,
-                    batched_intervals[snapshot][node.batch_index],
-                    node.batch_index,
-                    evaluation_duration_ms,
-                    num_audits - num_audits_failed,
-                    num_audits_failed,
-                    execution_stats=execution_stats,
+                    snapshot,
+                    batched_intervals[snapshot][node.batch_index],
+                    node.batch_index,
+                    evaluation_duration_ms,
+                    num_audits - num_audits_failed,
+                    num_audits_failed,
+                    execution_stats=execution_stats,
                 )
             elif isinstance(node, CreateNode):
                 self.snapshot_evaluator.create_snapshot(
                     snapshot=snapshot,
                     snapshots=self.snapshots_by_name,
                     deployability_index=deployability_index,
                     allow_destructive_snapshots=allow_destructive_snapshots or set(),
-                )
+                )

         try:
             with self.snapshot_evaluator.concurrent_context():

tests/core/engine_adapter/integration/test_integration.py

Lines changed: 1 addition & 2 deletions
@@ -2449,13 +2449,12 @@ def capture_execution_stats(
     assert len(physical_layer_results.tables) == len(physical_layer_results.non_temp_tables) == 3

     if ctx.engine_adapter.SUPPORTS_QUERY_EXECUTION_TRACKING:
+        assert actual_execution_stats["seed_model"].total_rows_processed == 7
         assert actual_execution_stats["incremental_model"].total_rows_processed == 7
         # snowflake doesn't track rows for CTAS
         assert actual_execution_stats["full_model"].total_rows_processed == (
             None if ctx.mark.startswith("snowflake") else 3
         )
-        # seed rows aren't tracked
-        assert actual_execution_stats["seed_model"].total_rows_processed is None

     if ctx.mark.startswith("bigquery") or ctx.mark.startswith("databricks"):
         assert actual_execution_stats["incremental_model"].total_bytes_processed is not None

tests/core/test_snapshot_evaluator.py

Lines changed: 0 additions & 1 deletion
@@ -617,7 +617,6 @@ def test_evaluate_materialized_view_with_partitioned_by_cluster_by(

     execute_mock.assert_has_calls(
         [
-            call("CREATE SCHEMA IF NOT EXISTS `sqlmesh__test_schema`", False),
             call(
                 f"CREATE MATERIALIZED VIEW `sqlmesh__test_schema`.`test_schema__test_model__{snapshot.version}` PARTITION BY `a` CLUSTER BY `b` AS SELECT `a` AS `a`, `b` AS `b` FROM `tbl` AS `tbl`",
                 False,
