2 changes: 1 addition & 1 deletion pysqlsync/dialect/mssql/object_types.py
@@ -17,7 +17,7 @@ class MSSQLColumn(Column):
def default_constraint_name(self) -> LocalId:
"The name of the constraint for DEFAULT."

-return LocalId(f"df_{self.name.local_id}")
+return LocalId(f"df_{self.table_name.local_id}_{self.name.local_id}")

@property
def data_spec(self) -> str:
1 change: 1 addition & 0 deletions pysqlsync/dialect/mysql/discovery.py
@@ -90,6 +90,7 @@ async def get_columns(self, table_id: SupportsQualifiedId) -> list[Column]:
for col in column_meta:
columns.append(
self.factory.column_class(
+table_id,
LocalId(col.column_name),
self.discovery.sql_data_type_from_spec(
type_name=col.data_type,
1 change: 1 addition & 0 deletions pysqlsync/dialect/oracle/discovery.py
@@ -152,6 +152,7 @@ async def get_table(self, table_id: SupportsQualifiedId) -> Table:
identity = bool(col.is_identity)
columns.append(
self.factory.column_class(
+table_id,
LocalId(col.column_name),
data_type,
nullable,
1 change: 1 addition & 0 deletions pysqlsync/dialect/postgresql/discovery.py
@@ -277,6 +277,7 @@ async def get_table(self, table_id: SupportsQualifiedId) -> Table:
data_type = SqlArrayType(data_type)
columns.append(
self.factory.column_class(
+table_id,
LocalId(col.column_name),
data_type,
bool(col.is_nullable),
2 changes: 2 additions & 0 deletions pysqlsync/formation/discovery.py
@@ -183,6 +183,7 @@ async def _get_columns_limited(self, table_id: SupportsQualifiedId) -> list[Column]
column_name, data_type, nullable = col
columns.append(
self.factory.column_class(
+table_id,
LocalId(column_name),
self.discovery.sql_data_type_from_spec(type_name=data_type),
bool(nullable),
@@ -211,6 +212,7 @@ async def _get_columns_full(self, table_id: SupportsQualifiedId) -> list[Column]
for col in column_meta:
columns.append(
self.factory.column_class(
+table_id,
LocalId(col.column_name),
self.discovery.sql_data_type_from_spec(
type_name=col.data_type,
2 changes: 2 additions & 0 deletions pysqlsync/formation/object_types.py
@@ -184,6 +184,7 @@ class Column(DatabaseObject):
"""
A column in a database table.

+:param table_name: The name of the table that the column belongs to.
:param name: The name of the column within its host table.
:param data_type: The SQL data type of the column.
:param nullable: True if the column can take the value NULL.
@@ -192,6 +193,7 @@
:param description: The textual description of the column.
"""

+table_name: SupportsQualifiedId
Owner:
I am a little hesitant to add table_name to Column. Originally, I wanted to ensure a top-down hierarchy of dependencies: a Namespace knows which Table objects it holds, but Table objects don't refer back to Namespace; Table objects keep track of the Column objects they hold, but a Column doesn't have a back-reference; and so on. This has some favorable properties, e.g. when you drop, move or rename objects, you can avoid recursive updates to the object model. Today, the code has no logic to check or update names when a parent object changes, specifically no logic to update all Column objects when a Table is moved or renamed.

On the other hand, the use of default_constraint_name seems localized: it is defined on MSSQLColumn. One of its callers, mutate_table_stmt, has access to a Table object whose name it could pass to default_constraint_name. Unfortunately, data_spec in Column is trickier; it is used both directly and via column_spec, in contexts where no Table object seems readily available. Likewise, drop_stmt has some contextual dependency. Let me look into this in more depth.
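As an illustration of that alternative, here is a minimal sketch in which the caller supplies the table identifier instead of Column storing a back-reference. It assumes Column, LocalId and SupportsQualifiedId are in scope as in object_types.py; the parameterized signature is hypothetical, not the code in this PR:

```python
# Hypothetical variant: the caller passes the table identifier, so Column
# keeps no back-reference to its host table.
class MSSQLColumn(Column):
    def default_constraint_name(self, table_id: SupportsQualifiedId) -> LocalId:
        "The name of the DEFAULT constraint, scoped to the given table."
        return LocalId(f"df_{table_id.local_id}_{self.name.local_id}")


# A caller such as mutate_table_stmt, which already holds the Table object,
# would then supply the name explicitly:
#     constraint = column.default_constraint_name(table.name)
```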

Owner:
One idea I have in mind is to treat DEFAULT like other constraints, i.e. to extend the implementation in add_constraints_stmt and drop_constraints_stmt, both of which have access to the MSSQLTable instance. Looking at mutate_table_stmt in MSSQLMutator, I can see this approach already in place there.
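Roughly, the idea could look like the sketch below: the DEFAULT constraint is emitted at the table level, where the qualified table name is already available. This is a sketch only; the real add_constraints_stmt signature is not shown here, and the `default` attribute and the exact clause text are assumptions:

```python
from collections.abc import Iterable

# Column, LocalId and SupportsQualifiedId are assumed to be in scope as in
# pysqlsync.formation.object_types.
def default_constraint_clauses(
    table_name: SupportsQualifiedId, columns: Iterable[Column]
) -> list[str]:
    "Builds ADD CONSTRAINT ... DEFAULT clauses for columns that have a default."
    clauses: list[str] = []
    for column in columns:
        if column.default is None:  # assumes Column exposes its DEFAULT value
            continue
        constraint = LocalId(f"df_{table_name.local_id}_{column.name.local_id}")
        clauses.append(
            f"ADD CONSTRAINT {constraint} DEFAULT {column.default} FOR {column.name}"
        )
    return clauses
```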

Collaborator Author:
Thank you, I get your point; I'll check that approach.

Owner:
I explored a third option, which is to add the constraint name as an attribute exclusive to the MSSQLColumn class.

Unfortunately, this approach has some ripple effects in terms of syntax, specifically in how Column (sub-)classes are constructed. The class Column had attributes with default initial values, which would normally force MSSQLColumn to declare only attributes that also have default initial values; this compelled me to introduce a factory function create on Column to keep optional arguments possible without imposing anything on derived classes. Furthermore, I had to modify database object discovery for Microsoft SQL Server to fetch the constraint name for a default column. The constraint name for default columns (intentionally) doesn't participate in testing object equality.

These changes are pushed to the branch dev, as the approach may need some refinement. In particular, tests fail due to the random strings; we will need to set a fixed seed for reproducible tests (or use string equality assertions with masking).
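The dataclass constraint being described can be illustrated with simplified stand-in classes; these are not the real pysqlsync definitions, only a sketch of the inheritance rule and the create factory it motivates:

```python
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class Column:
    # No field defaults here: if Column carried defaults, @dataclass would
    # force every field added by a subclass to carry a default as well
    # ("non-default argument follows default argument").
    name: str
    description: Optional[str]

    @classmethod
    def create(cls, name: str, description: Optional[str] = None) -> "Column":
        # Optional arguments live in the factory rather than in the field
        # list, so nothing is imposed on derived classes.
        return cls(name=name, description=description)


@dataclass
class MSSQLColumn(Column):
    # Dialect-specific attribute, excluded from equality checks, mirroring
    # the note that the constraint name does not participate in object
    # equality.
    default_constraint_name: Optional[str] = field(default=None, compare=False)
```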

Owner:
I have implemented string comparison with masking enabled, allowing randomly generated character sequences to be ignored in unit and integration tests. Test execution is now green on the branch dev.
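For readers unfamiliar with the technique, masked comparison could look roughly like the sketch below. This is not the actual helper on dev; the placeholder token and the pattern assumed for the random suffixes are illustrative only:

```python
import re

# Pattern for the randomly generated identifier suffixes to ignore; the real
# format of those sequences is an assumption here.
RANDOM_SUFFIX = re.compile(r"_[0-9a-f]{8}\b")


def mask(sql: str) -> str:
    "Replaces random identifier suffixes with a fixed token."
    return RANDOM_SUFFIX.sub("_<random>", sql)


def assert_sql_equal(actual: str, expected: str) -> None:
    # Compare statements with the random parts masked out on both sides.
    assert mask(actual) == mask(expected)
```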

name: LocalId
data_type: SqlDataType
nullable: bool
29 changes: 21 additions & 8 deletions pysqlsync/formation/py_to_sql.py
@@ -662,7 +662,11 @@ def member_to_sql_data_type(self, typ: TypeLike, cls: type) -> SqlDataType:
raise TypeError(f"unsupported data type: {typ}")

def member_to_column(
-self, field: DataclassField, cls: type[DataclassInstance], doc: Docstring
+self,
+table_name: SupportsQualifiedId,
+field: DataclassField,
+cls: type[DataclassInstance],
+doc: Docstring,
) -> Column:
"Converts a data-class field into a SQL table column."

@@ -724,6 +728,7 @@ def member_to_column(
)

return self.options.factory.column_class(
+table_name=table_name,
name=LocalId(field.name),
data_type=data_type,
nullable=props.nullable,
@@ -739,10 +744,11 @@ def dataclass_to_table(self, cls: type[DataclassInstance]) -> Table:
raise TypeError(f"expected: dataclass type; got: {cls}")

doc = parse_type(cls)
+id = self.create_qualified_id(cls.__module__, cls.__name__)

try:
columns = [
-self.member_to_column(field, cls, doc)
+self.member_to_column(id, field, cls, doc)
for field in dataclass_fields(cls)
if self._get_relationship(field.type) is None
]
@@ -809,7 +815,7 @@ def dataclass_to_table(self, cls: type[DataclassInstance]) -> Table:
)

return self.options.factory.table_class(
-name=self.create_qualified_id(cls.__module__, cls.__name__),
+name=id,
columns=columns,
primary_key=(LocalId(dataclass_primary_key_name(cls)),),
constraints=constraints or None,
@@ -932,6 +938,7 @@ def _enum_table(self, enum_type: type[enum.Enum]) -> Table:
id = self.create_qualified_id(enum_type.__module__, enum_table_name)
columns = [
self.options.factory.column_class(
+id,
LocalId("id"),
self._enumeration_key_type(),
False,
@@ -953,6 +960,7 @@ def _enum_table(self, enum_type: type[enum.Enum]) -> Table:
if unadorned_member_type is int or unadorned_member_type is str:
columns.append(
self.options.factory.column_class(
+id,
LocalId("value"),
self.member_to_sql_data_type(ENUM_LABEL_TYPE, type(None)),
False,
@@ -967,7 +975,7 @@ def _enum_table(self, enum_type: type[enum.Enum]) -> Table:
elif is_dataclass_type(unadorned_member_type):
columns.extend(
self.member_to_column(
-field, enum_member_type, parse_type(unadorned_member_type)
+id, field, enum_member_type, parse_type(unadorned_member_type)
)
for field in dataclass_fields(unadorned_member_type)
)
@@ -1148,26 +1156,31 @@ def dataclasses_to_catalog(
)

table_defs = tables.setdefault(entity.__module__, [])
+table_id = self.create_qualified_id(
+entity.__module__,
+table_name,
+)

table_defs.append(
self.options.factory.table_class(
-self.create_qualified_id(
-entity.__module__,
-table_name,
-),
+table_id,
[
self.options.factory.column_class(
+table_id,
LocalId("uuid"),
self.member_to_sql_data_type(uuid.UUID, entity),
False,
),
self.options.factory.column_class(
+table_id,
LocalId(column_left_name),
self.member_to_sql_data_type(
dataclass_primary_key_type(entity), entity
),
False,
),
self.options.factory.column_class(
+table_id,
LocalId(column_right_name),
primary_right_type,
False,