Skip to content

Commit f9eb71b

Browse files
authored
feat: widen dbt-core compatibility range (#5211)
1 parent 28cd326 commit f9eb71b

File tree

16 files changed

+249
-75
lines changed

16 files changed

+249
-75
lines changed

.github/workflows/pr.yaml

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ on:
88
concurrency:
99
group: 'pr-${{ github.event.pull_request.number }}'
1010
cancel-in-progress: true
11+
permissions:
12+
contents: read
1113
jobs:
1214
test-vscode:
1315
env:
@@ -66,3 +68,61 @@ jobs:
6668
name: playwright-report
6769
path: vscode/extension/playwright-report/
6870
retention-days: 30
71+
test-dbt-versions:
72+
runs-on: ubuntu-latest
73+
strategy:
74+
fail-fast: false
75+
matrix:
76+
dbt-version:
77+
[
78+
'1.3.0',
79+
'1.4.0',
80+
'1.5.0',
81+
'1.6.0',
82+
'1.7.0',
83+
'1.8.0',
84+
'1.9.0',
85+
'1.10.0',
86+
]
87+
steps:
88+
- uses: actions/checkout@v5
89+
- name: Set up Python
90+
uses: actions/setup-python@v5
91+
with:
92+
python-version: '3.10'
93+
- name: Install uv
94+
uses: astral-sh/setup-uv@v6
95+
- name: Install SQLMesh dev dependencies
96+
run: |
97+
uv venv .venv
98+
source .venv/bin/activate
99+
sed -i 's/"pydantic>=2.0.0"/"pydantic"/g' pyproject.toml
100+
if [[ "${{ matrix.dbt-version }}" == "1.10.0" ]]; then
101+
# For 1.10.0: only add version to dbt-core, remove versions from all adapter packages
102+
sed -i -E 's/"(dbt-core)[^"]*"/"\1~=${{ matrix.dbt-version }}"/g' pyproject.toml
103+
# Remove version constraints from all dbt adapter packages
104+
sed -i -E 's/"(dbt-(bigquery|duckdb|snowflake|athena-community|clickhouse|databricks|redshift|trino))[^"]*"/"\1"/g' pyproject.toml
105+
else
106+
# For other versions: apply version to all dbt packages
107+
sed -i -E 's/"(dbt-[^">=<~!]+)[^"]*"/"\1~=${{ matrix.dbt-version }}"/g' pyproject.toml
108+
fi
109+
UV=1 make install-dev
110+
uv pip install "pydantic>=2.0.0" --reinstall
111+
- name: Run dbt tests
112+
# We can't run the slow tests across all dbt versions because they require DuckDB,
113+
# and older dbt versions require a version of DuckDB that we no longer support
114+
run: |
115+
source .venv/bin/activate
116+
make dbt-fast-test
117+
- name: Test SQLMesh info in sushi_dbt
118+
working-directory: ./examples/sushi_dbt
119+
run: |
120+
source ../../.venv/bin/activate
121+
sed -i 's/target: in_memory/target: postgres/g' profiles.yml
122+
if [[ $(echo -e "${{ matrix.dbt-version }}\n1.5.0" | sort -V | head -n1) == "${{ matrix.dbt-version }}" ]] && [[ "${{ matrix.dbt-version }}" != "1.5.0" ]]; then
123+
echo "DBT version is ${{ matrix.dbt-version }} (< 1.5.0), removing version parameters..."
124+
sed -i -e 's/, version=1) }}/) }}/g' -e 's/, v=1) }}/) }}/g' models/top_waiters.sql
125+
else
126+
echo "DBT version is ${{ matrix.dbt-version }} (>= 1.5.0), keeping version parameters"
127+
fi
128+
sqlmesh info --skip-connection

Makefile

Lines changed: 18 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,16 @@
11
.PHONY: docs
22

3+
ifdef UV
4+
PIP := uv pip
5+
else
6+
PIP := pip3
7+
endif
8+
39
install-dev:
4-
pip3 install -e ".[dev,web,slack,dlt,lsp]" ./examples/custom_materializations
10+
$(PIP) install -e ".[dev,web,slack,dlt,lsp]" ./examples/custom_materializations
511

612
install-doc:
7-
pip3 install -r ./docs/requirements.txt
13+
$(PIP) install -r ./docs/requirements.txt
814

915
install-pre-commit:
1016
pre-commit install
@@ -22,16 +28,16 @@ doc-test:
2228
python -m pytest --doctest-modules sqlmesh/core sqlmesh/utils
2329

2430
package:
25-
pip3 install build && python3 -m build
31+
$(PIP) install build && python3 -m build
2632

2733
publish: package
28-
pip3 install twine && python3 -m twine upload dist/*
34+
$(PIP) install twine && python3 -m twine upload dist/*
2935

3036
package-tests:
31-
pip3 install build && cp pyproject.toml tests/sqlmesh_pyproject.toml && python3 -m build tests/
37+
$(PIP) install build && cp pyproject.toml tests/sqlmesh_pyproject.toml && python3 -m build tests/
3238

3339
publish-tests: package-tests
34-
pip3 install twine && python3 -m twine upload -r tobiko-private tests/dist/*
40+
$(PIP) install twine && python3 -m twine upload -r tobiko-private tests/dist/*
3541

3642
docs-serve:
3743
mkdocs serve
@@ -93,6 +99,9 @@ engine-test:
9399
dbt-test:
94100
pytest -n auto -m "dbt and not cicdonly"
95101

102+
dbt-fast-test:
103+
pytest -n auto -m "dbt and fast" --retries 3
104+
96105
github-test:
97106
pytest -n auto -m "github"
98107

@@ -109,7 +118,7 @@ guard-%:
109118
fi
110119

111120
engine-%-install:
112-
pip3 install -e ".[dev,web,slack,lsp,${*}]" ./examples/custom_materializations
121+
$(PIP) install -e ".[dev,web,slack,lsp,${*}]" ./examples/custom_materializations
113122

114123
engine-docker-%-up:
115124
docker compose -f ./tests/core/engine_adapter/integration/docker/compose.${*}.yaml up -d
@@ -157,11 +166,11 @@ snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAK
157166
pytest -n auto -m "snowflake" --retries 3 --junitxml=test-results/junit-snowflake.xml
158167

159168
bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
160-
pip install -e ".[bigframes]"
169+
$(PIP) install -e ".[bigframes]"
161170
pytest -n auto -m "bigquery" --retries 3 --junitxml=test-results/junit-bigquery.xml
162171

163172
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
164-
pip install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
173+
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
165174
pytest -n auto -m "databricks" --retries 3 --junitxml=test-results/junit-databricks.xml
166175

167176
redshift-test: guard-REDSHIFT_HOST guard-REDSHIFT_USER guard-REDSHIFT_PASSWORD guard-REDSHIFT_DATABASE engine-redshift-install

examples/sushi_dbt/profiles.yml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,14 @@ sushi:
33
in_memory:
44
type: duckdb
55
schema: sushi
6+
postgres:
7+
type: postgres
8+
host: "host"
9+
user: "user"
10+
password: "password"
11+
dbname: "dbname"
12+
port: 5432
13+
schema: sushi
614
duckdb:
715
type: duckdb
816
path: 'local.duckdb'

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ bigframes = ["bigframes>=1.32.0"]
5252
clickhouse = ["clickhouse-connect"]
5353
databricks = ["databricks-sql-connector[pyarrow]"]
5454
dev = [
55-
"agate==1.7.1",
55+
"agate",
5656
"beautifulsoup4",
5757
"clickhouse-connect",
5858
"cryptography",

sqlmesh/dbt/loader.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -188,8 +188,11 @@ def _load_projects(self) -> t.List[Project]:
188188

189189
self._projects.append(project)
190190

191-
if project.context.target.database != (self.context.default_catalog or ""):
192-
raise ConfigError("Project default catalog does not match context default catalog")
191+
context_default_catalog = self.context.default_catalog or ""
192+
if project.context.target.database != context_default_catalog:
193+
raise ConfigError(
194+
f"Project default catalog ('{project.context.target.database}') does not match context default catalog ('{context_default_catalog}')."
195+
)
193196
for path in project.project_files:
194197
self._track_file(path)
195198

sqlmesh/dbt/manifest.py

Lines changed: 25 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,17 @@
1313

1414
from dbt import constants as dbt_constants, flags
1515

16+
from sqlmesh.dbt.util import DBT_VERSION
1617
from sqlmesh.utils.conversions import make_serializable
1718

1819
# Override the file name to prevent dbt commands from invalidating the cache.
19-
dbt_constants.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack"
20+
21+
if DBT_VERSION >= (1, 6, 0):
22+
dbt_constants.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack" # type: ignore
23+
else:
24+
from dbt.parser import manifest as dbt_manifest # type: ignore
25+
26+
dbt_manifest.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack" # type: ignore
2027

2128
import jinja2
2229
from dbt.adapters.factory import register_adapter, reset_adapters
@@ -379,11 +386,17 @@ def _load_on_run_start_end(self) -> None:
379386

380387
if "on-run-start" in node.tags:
381388
self._on_run_start_per_package[node.package_name][node_name] = HookConfig(
382-
sql=sql, index=node.index or 0, path=node_path, dependencies=dependencies
389+
sql=sql,
390+
index=getattr(node, "index", None) or 0,
391+
path=node_path,
392+
dependencies=dependencies,
383393
)
384394
else:
385395
self._on_run_end_per_package[node.package_name][node_name] = HookConfig(
386-
sql=sql, index=node.index or 0, path=node_path, dependencies=dependencies
396+
sql=sql,
397+
index=getattr(node, "index", None) or 0,
398+
path=node_path,
399+
dependencies=dependencies,
387400
)
388401

389402
@property
@@ -599,6 +612,9 @@ def _macro_references(
599612
manifest: Manifest, node: t.Union[ManifestNode, Macro]
600613
) -> t.Set[MacroReference]:
601614
result: t.Set[MacroReference] = set()
615+
if not hasattr(node, "depends_on"):
616+
return result
617+
602618
for macro_node_id in node.depends_on.macros:
603619
if not macro_node_id:
604620
continue
@@ -614,18 +630,20 @@ def _macro_references(
614630

615631
def _refs(node: ManifestNode) -> t.Set[str]:
616632
if DBT_VERSION >= (1, 5, 0):
617-
result = set()
633+
result: t.Set[str] = set()
634+
if not hasattr(node, "refs"):
635+
return result
618636
for r in node.refs:
619-
ref_name = f"{r.package}.{r.name}" if r.package else r.name
637+
ref_name = f"{r.package}.{r.name}" if r.package else r.name # type: ignore
620638
if getattr(r, "version", None):
621-
ref_name = f"{ref_name}_v{r.version}"
639+
ref_name = f"{ref_name}_v{r.version}" # type: ignore
622640
result.add(ref_name)
623641
return result
624642
return {".".join(r) for r in node.refs} # type: ignore
625643

626644

627645
def _sources(node: ManifestNode) -> t.Set[str]:
628-
return {".".join(s) for s in node.sources}
646+
return {".".join(s) for s in getattr(node, "sources", [])}
629647

630648

631649
def _model_node_id(model_name: str, package: str) -> str:

sqlmesh/dbt/relation.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from sqlmesh.dbt.util import DBT_VERSION
22

33

4-
if DBT_VERSION < (1, 8, 0):
5-
from dbt.contracts.relation import * # type: ignore # noqa: F403
6-
else:
4+
if DBT_VERSION >= (1, 8, 0):
75
from dbt.adapters.contracts.relation import * # type: ignore # noqa: F403
6+
else:
7+
from dbt.contracts.relation import * # type: ignore # noqa: F403

sqlmesh/dbt/seed.py

Lines changed: 26 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,13 @@
55

66
import agate
77

8-
try:
8+
from sqlmesh.dbt.util import DBT_VERSION
9+
10+
if DBT_VERSION >= (1, 8, 0):
911
from dbt_common.clients import agate_helper # type: ignore
1012

1113
SUPPORTS_DELIMITER = True
12-
except ImportError:
14+
else:
1315
from dbt.clients import agate_helper # type: ignore
1416

1517
SUPPORTS_DELIMITER = False
@@ -95,31 +97,33 @@ def to_sqlmesh(
9597
)
9698

9799

98-
class Integer(agate_helper.Integer):
99-
def cast(self, d: t.Any) -> t.Optional[int]:
100-
if isinstance(d, str):
101-
# The dbt's implementation doesn't support coercion of strings to integers.
102-
if d.strip().lower() in self.null_values:
103-
return None
104-
try:
105-
return int(d)
106-
except ValueError:
107-
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
108-
return super().cast(d)
109-
110-
def jsonify(self, d: t.Any) -> str:
111-
return d
112-
113-
114-
agate_helper.Integer = Integer # type: ignore
115-
116-
117100
AGATE_TYPE_MAPPING = {
118-
agate_helper.Integer: exp.DataType.build("int"),
119101
agate_helper.Number: exp.DataType.build("double"),
120102
agate_helper.ISODateTime: exp.DataType.build("datetime"),
121103
agate.Date: exp.DataType.build("date"),
122104
agate.DateTime: exp.DataType.build("datetime"),
123105
agate.Boolean: exp.DataType.build("boolean"),
124106
agate.Text: exp.DataType.build("text"),
125107
}
108+
109+
110+
if DBT_VERSION >= (1, 7, 0):
111+
112+
class Integer(agate_helper.Integer):
113+
def cast(self, d: t.Any) -> t.Optional[int]:
114+
if isinstance(d, str):
115+
# The dbt's implementation doesn't support coercion of strings to integers.
116+
if d.strip().lower() in self.null_values:
117+
return None
118+
try:
119+
return int(d)
120+
except ValueError:
121+
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
122+
return super().cast(d)
123+
124+
def jsonify(self, d: t.Any) -> str:
125+
return d
126+
127+
agate_helper.Integer = Integer # type: ignore
128+
129+
AGATE_TYPE_MAPPING[agate_helper.Integer] = exp.DataType.build("int")

sqlmesh/dbt/util.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,10 @@ def _get_dbt_version() -> t.Tuple[int, int, int]:
2020

2121
DBT_VERSION = _get_dbt_version()
2222

23-
if DBT_VERSION < (1, 8, 0):
24-
from dbt.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
25-
else:
23+
if DBT_VERSION >= (1, 8, 0):
2624
from dbt_common.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
25+
else:
26+
from dbt.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
2727

2828

2929
def pandas_to_agate(df: pd.DataFrame) -> agate.Table:

tests/dbt/conftest.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from sqlmesh.core.context import Context
88
from sqlmesh.dbt.context import DbtContext
99
from sqlmesh.dbt.project import Project
10+
from sqlmesh.dbt.target import PostgresConfig
1011

1112

1213
@pytest.fixture()
@@ -25,3 +26,16 @@ def render(value: str) -> str:
2526
return render
2627

2728
return create_renderer
29+
30+
31+
@pytest.fixture()
32+
def dbt_dummy_postgres_config() -> PostgresConfig:
33+
return PostgresConfig( # type: ignore
34+
name="postgres",
35+
host="host",
36+
user="user",
37+
password="password",
38+
dbname="dbname",
39+
port=5432,
40+
schema="schema",
41+
)

0 commit comments

Comments
 (0)