@@ -229,6 +229,33 @@ def _get_partitions(
229
229
partition_names = [desc [0 ] for desc in res .cursor .description ]
230
230
return partition_names
231
231
232
def _has_connector_name(self, connection: Connection):
    """Return ``True`` when ``system.metadata.catalogs`` exposes a
    ``connector_name`` column.

    Older Trino servers lack this column, so callers probe for it before
    attempting connector-specific lookups.
    """
    # Probe information_schema instead of selecting the column directly,
    # so the check cannot raise on servers that predate the column.
    probe = dedent(
        """
        SELECT
            COUNT(*)
        FROM "system"."information_schema"."columns"
        WHERE "table_catalog" = 'system'
        AND "table_schema" = 'metadata'
        AND "table_name" = 'catalogs'
        AND "column_name" = 'connector_name'
        """
    ).strip()
    result = connection.execute(sql.text(probe))
    # COUNT(*) is 1 exactly when the column exists.
    return result.scalar() == 1
247
def _get_connector_name(self, connection: Connection, catalog_name: str):
    """Return the connector backing *catalog_name*, or ``None`` if the
    catalog is not present in ``system.metadata.catalogs``.
    """
    lookup = dedent(
        """
        SELECT
            "connector_name"
        FROM "system"."metadata"."catalogs"
        WHERE "catalog_name" = :catalog_name
        """
    ).strip()
    # The catalog name is passed as a bound parameter, never interpolated.
    result = connection.execute(sql.text(lookup), {"catalog_name": catalog_name})
    return result.scalar()
232
259
def get_pk_constraint(self, connection: Connection, table_name: str, schema: str = None, **kw) -> Dict[str, Any]:
    """Trino does not support primary key constraints.

    Return an unnamed, empty constraint dict so SQLAlchemy reflection
    keeps working against Trino tables.
    """
    return {"name": None, "constrained_columns": []}
@@ -322,11 +349,21 @@ def get_indexes(self, connection: Connection, table_name: str, schema: str = Non
322
349
if not self .has_table (connection , table_name , schema ):
323
350
raise exc .NoSuchTableError (f"schema={ schema } , table={ table_name } " )
324
351
352
+ if self ._has_connector_name (connection ):
353
+ catalog_name = self ._get_default_catalog_name (connection )
354
+ if catalog_name is None :
355
+ raise exc .NoSuchTableError ("catalog is required in connection" )
356
+ connector_name = self ._get_connector_name (connection , catalog_name )
357
+ if connector_name is None :
358
+ raise exc .NoSuchTableError ("connector name is required" )
359
+ if connector_name != "hive" :
360
+ return []
361
+
325
362
partitioned_columns = None
326
363
try :
327
364
partitioned_columns = self ._get_partitions (connection , f"{ table_name } " , schema )
328
365
except Exception as e :
329
- # e.g. it's not a Hive table or an unpartitioned Hive table
366
+ # e.g. it's an unpartitioned Hive table
330
367
logger .debug ("Couldn't fetch partition columns. schema: %s, table: %s, error: %s" , schema , table_name , e )
331
368
if not partitioned_columns :
332
369
return []
0 commit comments