from alembic.ddl.base import ColumnComment, ColumnType
from sqlalchemy import util, exc
from sqlalchemy.sql import compiler, sqltypes, ColumnElement
from sqlalchemy.sql.schema import Column as DefaultColumn
from sqlalchemy.sql.schema import ColumnDefault, Sequence, DefaultClause, FetchedValue
from sqlalchemy.sql.schema import NULL_UNSPECIFIED
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.type_api import TypeEngine
from sqlalchemy.sql.base import SchemaEventTarget
from sqlalchemy.sql.elements import quoted_name


class Column(DefaultColumn):
    """Represents a column in a Databricks table."""

    __visit_name__ = "column"

    inherit_cache = True

    def __init__(self, *args, **kwargs):
        name = kwargs.pop("name", None)
        type_ = kwargs.pop("type_", None)
        args = list(args)
        if args:
            if isinstance(args[0], util.string_types):
                if name is not None:
                    raise exc.ArgumentError(
                        "May not pass name positionally and as a keyword."
                    )
                name = args.pop(0)
        if args:
            coltype = args[0]

            if hasattr(coltype, "_sqla_type"):
                if type_ is not None:
                    raise exc.ArgumentError(
                        "May not pass type_ positionally and as a keyword."
                    )
                type_ = args.pop(0)

        if name is not None:
            name = quoted_name(name, kwargs.pop("quote", None))
        elif "quote" in kwargs:
            raise exc.ArgumentError(
                "Explicit 'name' is required when sending 'quote' argument"
            )

        super(Column, self).__init__(name, type_)
        self.key = kwargs.pop("key", name)
        self.primary_key = primary_key = kwargs.pop("primary_key", False)

        self._user_defined_nullable = udn = kwargs.pop(
            "nullable", NULL_UNSPECIFIED
        )

        if udn is not NULL_UNSPECIFIED:
            self.nullable = udn
        else:
            self.nullable = not primary_key

        self.default = kwargs.pop("default", None)
        self.server_default = kwargs.pop("server_default", None)
        self.server_onupdate = kwargs.pop("server_onupdate", None)

        # these default to None because .index and .unique are *not*
        # informational flags about Column - there can still be an
        # Index or UniqueConstraint referring to this Column.
        self.index = kwargs.pop("index", None)
        self.unique = kwargs.pop("unique", None)

        self.system = kwargs.pop("system", False)
        self.doc = kwargs.pop("doc", None)
        self.onupdate = kwargs.pop("onupdate", None)
        self.autoincrement = kwargs.pop("autoincrement", "auto")
        self.constraints = set()
        self.foreign_keys = set()
        self.comment = kwargs.pop("comment", None)
        self.computed = None
        self.identity = None
        self.liquid_cluster = kwargs.pop("liquid_cluster", None)

        # check if this Column is proxying another column
        if "_proxies" in kwargs:
            self._proxies = kwargs.pop("_proxies")
        # otherwise, add DDL-related events
        elif isinstance(self.type, SchemaEventTarget):
            self.type._set_parent_with_dispatch(self)

        if self.default is not None:
            if isinstance(self.default, (ColumnDefault, Sequence)):
                args.append(self.default)
            else:
                if getattr(self.type, "_warn_on_bytestring", False):
                    if isinstance(self.default, util.binary_type):
                        util.warn(
                            "Unicode column '%s' has non-unicode "
                            "default value %r specified."
                            % (self.key, self.default)
                        )
                args.append(ColumnDefault(self.default))

        if self.server_default is not None:
            if isinstance(self.server_default, FetchedValue):
                args.append(self.server_default._as_for_update(False))
            else:
                args.append(DefaultClause(self.server_default))

        if self.onupdate is not None:
            if isinstance(self.onupdate, (ColumnDefault, Sequence)):
                args.append(self.onupdate)
            else:
                args.append(ColumnDefault(self.onupdate, for_update=True))

        if self.server_onupdate is not None:
            if isinstance(self.server_onupdate, FetchedValue):
                args.append(self.server_onupdate._as_for_update(True))
            else:
                args.append(
                    DefaultClause(self.server_onupdate, for_update=True)
                )
        self._init_items(*args)

        util.set_creation_order(self)

        if "info" in kwargs:
            self.info = kwargs.pop("info")

        self._extra_kwargs(**kwargs)

    foreign_keys = None
    """A collection of all :class:`_schema.ForeignKey` marker objects
    associated with this :class:`_schema.Column`.

    Each object is a member of a :class:`_schema.Table`-wide
    :class:`_schema.ForeignKeyConstraint`.

    .. seealso::

        :attr:`_schema.Table.foreign_keys`

    """

    index = None
    """The value of the :paramref:`_schema.Column.index` parameter.

    Does not indicate if this :class:`_schema.Column` is actually indexed
    or not; use :attr:`_schema.Table.indexes`.

    .. seealso::

        :attr:`_schema.Table.indexes`
    """

    unique = None
    """The value of the :paramref:`_schema.Column.unique` parameter.

    Does not indicate if this :class:`_schema.Column` is actually subject to
    a unique constraint or not; use :attr:`_schema.Table.indexes` and
    :attr:`_schema.Table.constraints`.

    .. seealso::

        :attr:`_schema.Table.indexes`

        :attr:`_schema.Table.constraints`.

    """
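# Illustrative usage sketch (not part of the dialect source): assuming the
# ``liquid_cluster`` keyword accepted by the Column subclass above is a
# boolean flag marking a column for Delta liquid clustering, a table
# definition might look like the following. The table and column names are
# hypothetical.
#
#     from sqlalchemy import MetaData, Table, BigInteger, String
#
#     metadata = MetaData()
#     users = Table(
#         "users",
#         metadata,
#         Column("id", BigInteger, primary_key=True, liquid_cluster=True),
#         Column("email", String, nullable=False),
#     )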


class DatabricksIdentifierPreparer(compiler.IdentifierPreparer):
    # SparkSQL identifier specification: