@@ -56,13 +56,9 @@ class OperationSplitter(conf: Config,
                             sourceTables: Seq[SourceTable])(implicit spark: SparkSession): Seq[Job] = {
     val specialCharacters = conf.getString(SPECIAL_CHARACTERS_IN_COLUMN_NAMES)
     val temporaryDirectory = ConfigUtils.getOptionString(conf, TEMPORARY_DIRECTORY_KEY)
-    val sourceBase = SourceManager.getSourceByName(sourceName, conf, None)

     sourceTables.map(sourceTable => {
-      val source = sourceTable.overrideConf match {
-        case Some(confOverride) => SourceManager.getSourceByName(sourceName, conf, Some(confOverride))
-        case None => sourceBase
-      }
+      val source = SourceManager.getSourceByName(sourceName, conf, sourceTable.overrideConf)

       val disableCountQuery = ConfigUtils.getOptionBoolean(source.config, DISABLE_COUNT_QUERY).getOrElse(false)
       val outputTable = metastore.getTableDef(sourceTable.metaTableName)
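The change here, and in the two hunks below, relies on the fact that SourceManager.getSourceByName and SinkManager.getSinkByName already take the override as an Option[Config] third argument, so the caller can forward each table's overrideConf directly instead of pattern matching on it and keeping a precomputed sourceBase/sinkBase around. A minimal, self-contained sketch of that pattern follows; resolveConfig, OverrideForwardingSketch, and fetch.size are hypothetical names, and the withFallback merge is only an assumed semantic, not the project's actual resolver logic.

import com.typesafe.config.{Config, ConfigFactory}

object OverrideForwardingSketch {
  // Hypothetical stand-in for a resolver that, like getSourceByName(name, conf, overrideConf),
  // accepts an optional per-table override as its last argument.
  // Assumed semantics: the override is layered on top of the base config when present.
  def resolveConfig(base: Config, overrideConf: Option[Config]): Config =
    overrideConf.map(_.withFallback(base)).getOrElse(base)

  def main(args: Array[String]): Unit = {
    val base = ConfigFactory.parseString("fetch.size = 1000")
    val tableOverride: Option[Config] = Some(ConfigFactory.parseString("fetch.size = 50000"))

    // Before: pre-resolve a "base" instance and pattern match per table.
    val resolvedBase = resolveConfig(base, None)
    val before = tableOverride match {
      case Some(c) => resolveConfig(base, Some(c))
      case None    => resolvedBase
    }

    // After: forward the Option as-is; both branches collapse into a single call.
    val after = resolveConfig(base, tableOverride)

    println(before.getInt("fetch.size")) // 50000
    println(after.getInt("fetch.size"))  // 50000
  }
}

Both forms yield the same result; the forwarded version simply lets the resolver handle the None case itself.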
@@ -85,19 +81,10 @@ class OperationSplitter(conf: Config,
                          tables: Seq[TransferTable])(implicit spark: SparkSession): Seq[Job] = {
     val specialCharacters = conf.getString(SPECIAL_CHARACTERS_IN_COLUMN_NAMES)
     val temporaryDirectory = ConfigUtils.getOptionString(conf, TEMPORARY_DIRECTORY_KEY)
-    val sourceBase = SourceManager.getSourceByName(sourceName, conf, None)
-    val sinkBase = SinkManager.getSinkByName(sinkName, conf, None)

     tables.map(transferTable => {
-      val source = transferTable.sourceOverrideConf match {
-        case Some(confOverride) => SourceManager.getSourceByName(sourceName, conf, Some(confOverride))
-        case None => sourceBase
-      }
-
-      val sink = transferTable.sinkOverrideConf match {
-        case Some(confOverride) => SinkManager.getSinkByName(sinkName, conf, Some(confOverride))
-        case None => sinkBase
-      }
+      val source = SourceManager.getSourceByName(sourceName, conf, transferTable.sourceOverrideConf)
+      val sink = SinkManager.getSinkByName(sinkName, conf, transferTable.sinkOverrideConf)

       val disableCountQuery = ConfigUtils.getOptionBoolean(source.config, DISABLE_COUNT_QUERY).getOrElse(false)
       val outputTable = TransferTableParser.getMetaTable(transferTable)
@@ -146,15 +133,10 @@ class OperationSplitter(conf: Config,
                          sinkName: String,
                          sinkTables: Seq[SinkTable])
                         (implicit spark: SparkSession): Seq[Job] = {
-    val sinkBase = SinkManager.getSinkByName(sinkName, conf, None)
-
     sinkTables.map(sinkTable => {
       val inputTable = metastore.getTableDef(sinkTable.metaTableName)

-      val sink = sinkTable.overrideConf match {
-        case Some(confOverride) => SinkManager.getSinkByName(sinkName, conf, Some(confOverride))
-        case None => sinkBase
-      }
+      val sink = SinkManager.getSinkByName(sinkName, conf, sinkTable.overrideConf)

       val outputTableName = sinkTable.outputTableName.getOrElse(s"${sinkTable.metaTableName}->$sinkName")

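One behavioral note on all three hunks: sources and sinks are now resolved inside the map, once per table, even when no override is present, whereas the old code reused a single precomputed sourceBase/sinkBase for the no-override case. This trades a little repeated work per table for simpler code, which seems fine assuming getSourceByName/getSinkByName are cheap and side-effect free (an assumption based only on what this diff shows).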