diff --git a/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbc.scala b/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbc.scala
index 750ebcac3..d80f77ec6 100644
--- a/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbc.scala
+++ b/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbc.scala
@@ -192,8 +192,11 @@ class TableReaderJdbc(jdbcReaderConfig: TableReaderJdbcConfig,
       JdbcSparkUtils.withJdbcMetadata(jdbcReaderConfig.jdbcConfig, sql) { (connection, jdbcMetadata) =>
         val schemaWithMetadata = JdbcSparkUtils.addMetadataFromJdbc(df.schema, jdbcMetadata)
         val schemaWithColumnDescriptions = tableOpt match {
-          case Some(table) => JdbcSparkUtils.addColumnDescriptionsFromJdbc(schemaWithMetadata, table, connection)
-          case None => schemaWithMetadata
+          case Some(table) =>
+            log.info(s"Reading JDBC metadata descriptions for the table: $table")
+            JdbcSparkUtils.addColumnDescriptionsFromJdbc(schemaWithMetadata, sqlGen.unquote(table), connection)
+          case None =>
+            schemaWithMetadata
         }
         df = spark.createDataFrame(df.rdd, schemaWithColumnDescriptions)
       }
diff --git a/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbcNative.scala b/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbcNative.scala
index 93b07b9d1..e7a1831bd 100644
--- a/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbcNative.scala
+++ b/pramen/core/src/main/scala/za/co/absa/pramen/core/reader/TableReaderJdbcNative.scala
@@ -88,12 +88,13 @@ class TableReaderJdbcNative(jdbcReaderConfig: TableReaderJdbcConfig,
     }
 
     if (jdbcReaderConfig.enableSchemaMetadata) {
-      JdbcSparkUtils.withJdbcMetadata(jdbcReaderConfig.jdbcConfig, sql) { (connection, jdbcMetadata) =>
+      JdbcSparkUtils.withJdbcMetadata(jdbcReaderConfig.jdbcConfig, sql) { (connection, _) =>
         val schemaWithColumnDescriptions = tableOpt match {
           case Some(table) =>
-            log.info(s"Reading JDBC metadata descriptions the query: $sql")
-            JdbcSparkUtils.addColumnDescriptionsFromJdbc(df.schema, table, connection)
-          case None => df.schema
+            log.info(s"Reading JDBC metadata descriptions for the table: $table")
+            JdbcSparkUtils.addColumnDescriptionsFromJdbc(df.schema, sqlGen.unquote(table), connection)
+          case None =>
+            df.schema
         }
         df = spark.createDataFrame(df.rdd, schemaWithColumnDescriptions)
       }
diff --git a/pramen/core/src/main/scala/za/co/absa/pramen/core/utils/JdbcSparkUtils.scala b/pramen/core/src/main/scala/za/co/absa/pramen/core/utils/JdbcSparkUtils.scala
index 4a1a93b88..b89cfbda5 100644
--- a/pramen/core/src/main/scala/za/co/absa/pramen/core/utils/JdbcSparkUtils.scala
+++ b/pramen/core/src/main/scala/za/co/absa/pramen/core/utils/JdbcSparkUtils.scala
@@ -26,7 +26,6 @@ import za.co.absa.pramen.core.utils.impl.JdbcFieldMetadata
 
 import java.sql.{Connection, DatabaseMetaData, ResultSet, ResultSetMetaData}
 import scala.collection.mutable.ListBuffer
-import scala.util.control.NonFatal
 
 object JdbcSparkUtils {
   private val log = LoggerFactory.getLogger(this.getClass)
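
For context on why the new code passes sqlGen.unquote(table) rather than the raw table name: java.sql.DatabaseMetaData.getColumns() matches bare identifiers, so a quoted name (with the quote characters included) would typically return no metadata rows. The following is a minimal sketch of that lookup under this assumption; unquote here is a hypothetical stand-in, not Pramen's actual sqlGen.unquote implementation.

import java.sql.Connection

object UnquoteSketch {
  // Hypothetical stand-in for sqlGen.unquote: strips common identifier quoting.
  def unquote(identifier: String): String =
    identifier.replaceAll("[\"`\\[\\]]", "")

  // Prints each column's REMARKS (the JDBC column description) for a table.
  def printColumnDescriptions(connection: Connection, table: String): Unit = {
    // getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern)
    // matches bare identifiers, hence unquote(table).
    val rs = connection.getMetaData.getColumns(null, null, unquote(table), null)
    try {
      while (rs.next()) {
        val column = rs.getString("COLUMN_NAME")
        val remarks = Option(rs.getString("REMARKS")).getOrElse("")
        println(s"$column: $remarks")
      }
    } finally {
      rs.close()
    }
  }
}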