@@ -162,46 +162,6 @@ def parse_describe_extended(
             for idx, column in enumerate(rows)
         ]
 
-    def get_columns_in_relation(self, relation: DatabricksRelation) -> List[DatabricksColumn]:
-        cached_relations = self.cache.get_relations(relation.database, relation.schema)
-        cached_relation = next(
-            (
-                cached_relation
-                for cached_relation in cached_relations
-                if str(cached_relation) == str(relation)
-            ),
-            None,
-        )
-        columns = []
-        if cached_relation and cached_relation.information:
-            columns = self.parse_columns_from_information(cached_relation)
-        if not columns:
-            # in open source delta, 'show table extended' output doesn't
-            # return the relation's schema. if columns are empty from the cache,
-            # use the get_columns_in_relation spark macro,
-            # which executes a 'describe extended tablename' query
-            try:
-                rows: List[Row] = super(SparkAdapter, self).get_columns_in_relation(relation)
-                columns = self.parse_describe_extended(relation, rows)
-            except dbt.exceptions.RuntimeException as e:
-                # spark throws an error when the table doesn't exist, where other
-                # CDWs just return an empty list; normalize the behavior here
-                errmsg = getattr(e, "msg", "")
-                if any(
-                    msg in errmsg
-                    for msg in (
-                        "[TABLE_OR_VIEW_NOT_FOUND]",
-                        "Table or view not found",
-                        "NoSuchTableException",
-                    )
-                ):
-                    pass
-                else:
-                    raise e
-
-        # strip hudi metadata columns.
-        return [x for x in columns if x.name not in self.HUDI_METADATA_COLUMNS]
-
     def parse_columns_from_information(
         self, relation: DatabricksRelation
     ) -> List[DatabricksColumn]:
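Note on the removal above: the deleted get_columns_in_relation first checked dbt's relation cache, then fell back to a 'describe extended' lookup, and finally normalized "table or view not found" errors into an empty column list, so the adapter behaves like warehouses whose drivers simply return no columns. A minimal standalone sketch of that normalization pattern follows; MissingTableError, fetch_columns, and columns_or_empty are illustrative names, not the adapter's real API.

from typing import Callable, List


class MissingTableError(RuntimeError):
    """Stand-in for dbt.exceptions.RuntimeException; illustrative only."""


# Error-message fragments matched by the removed method (copied from the diff).
MISSING_TABLE_MARKERS = (
    "[TABLE_OR_VIEW_NOT_FOUND]",
    "Table or view not found",
    "NoSuchTableException",
)


def columns_or_empty(fetch_columns: Callable[[str], List[str]], relation: str) -> List[str]:
    """Return columns, treating a missing table as no columns; re-raise other errors."""
    try:
        return fetch_columns(relation)
    except MissingTableError as e:
        errmsg = getattr(e, "msg", str(e))
        if any(marker in errmsg for marker in MISSING_TABLE_MARKERS):
            return []  # normalize: a missing relation behaves like an empty one
        raise


if __name__ == "__main__":
    def fetch(relation: str) -> List[str]:
        raise MissingTableError(f"Table or view not found: {relation}")

    print(columns_or_empty(fetch, "analytics.events"))  # prints []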