77from sqlalchemy .sql .type_api import TypeEngine
88
99
10- # class DatabricksColumn(DefaultColumn):
11- # """Represents a column in a databricks table."""
12- # def __init__(self, *args, liquid_cluster=False, **kwargs):
13- # self.liquid_cluster = liquid_cluster
14- # super().__init__(*args, **kwargs)
15-
16-
1710class DatabricksIdentifierPreparer (compiler .IdentifierPreparer ):
1811# SparkSQL identifier specification:
1912# ref: https://spark.apache.org/docs/latest/sql-ref-identifier.html
@@ -28,34 +21,6 @@ class DatabricksDDLCompiler(compiler.DDLCompiler):
2821def post_create_table (self ,table ):
2922return " USING DELTA"
3023
31- # def get_column_specification(self, column: DatabricksColumn, **kwargs):
32- # colspec = (
33- # self.preparer.format_column(column)
34- # + " "
35- # + self.dialect.type_compiler.process(
36- # column.type, type_expression=column
37- # )
38- # )
39- #
40- # default = self.get_column_default_string(column)
41- # if default is not None:
42- # colspec += " DEFAULT " + default
43- #
44- # if column.computed is not None:
45- # colspec += " " + self.process(column.computed)
46- #
47- # if (
48- # column.identity is not None
49- # and self.dialect.supports_identity_columns
50- # ):
51- # colspec += " " + self.process(column.identity)
52- #
53- # if not column.nullable and (
54- # not column.identity or not self.dialect.supports_identity_columns
55- # ):
56- # colspec += " NOT NULL"
57- # return colspec
58-
5924def visit_set_column_comment (self ,create ,** kw ):
6025"""
6126 Example syntax for adding column comment:
@@ -150,7 +115,7 @@ def visit_create_table(self, create, **kw):
150115from_ = ce ,
151116 )
152117
153- # Check and apply liquid clustering
118+ # Check for and apply liquid clustering
154119if 'databricks' in column .dialect_options :
155120try :
156121cluster_on = column .dialect_options ['databricks' ].__getitem__ ('cluster_key' )
@@ -184,34 +149,6 @@ def visit_drop_table(self, drop, **kw):
184149
185150return text + self .preparer .format_table (drop .element )
186151
187- # def visit_create_column(self, create, first_pk=False, **kw):
188- # column = create.element
189-
190- # if column.system:
191- # return None
192-
193- # text = self.get_column_specification(column, first_pk=first_pk)
194- # const = " ".join(
195- # self.process(constraint) for constraint in column.constraints
196- # )
197- # if const:
198- # text += " " + const
199-
200- # # Code to deal with NOT NULL being unsupported in ADD COLUMNS clause
201- # if "NOT NULL" in text:
202- # text.replace("NOT NULL", "")
203- # text += """;
204- # ALTER TABLE {0} ALTER COLUMN {1} SET NOT NULL;
205- # """.format(
206- # self._format_table_from_column(
207- # create, use_schema=True
208- # ),
209- # self.preparer.format_column(
210- # create.element, use_table=False
211- # )
212- # )
213- # return text
214-
215152
216153@compiles (ColumnComment ,"databricks" )
217154def visit_column_comment (
@@ -231,18 +168,3 @@ def visit_column_comment(
231168column_name = element .column_name ,
232169comment = comment ,
233170 )
234-
235-
236- # @compiles(ColumnType, "databricks")
237- # def visit_column_type(element: ColumnType, compiler: DatabricksDDLCompiler, **kw) -> str:
238- #
239- #
240- # return "%s %s %s" % (
241- # alter_table(compiler, element.table_name, element.schema),
242- # alter_column(compiler, element.column_name),
243- # "TYPE %s" % format_type(compiler, element.type_),
244- # )
245- #
246- #
247- # def format_type(compiler: DatabricksDDLCompiler, type_: TypeEngine) -> str:
248- # return compiler.dialect.type_compiler.process(type_)