Skip to content

Commit a04ef86

Browse files
温天柱
authored and committed
[merge] hotfix_1.10_4.0.x_35168 -> tmp_1.10_release_4.1.x
resolve conflict
2 parents 7bbd2da + cd0be00 commit a04ef86

File tree

7 files changed

+204
-103
lines changed

7 files changed

+204
-103
lines changed

core/src/main/java/com/dtstack/flink/sql/GetPlan.java

+5-2
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,10 @@
3030
import java.net.URLClassLoader;
3131

3232
/**
33-
* local模式获取sql任务的执行计划
33+
* local模式获取sql任务的执行计划
3434
* Date: 2020/2/17
3535
* Company: www.dtstack.com
36+
*
3637
* @author maqi
3738
*/
3839
public class GetPlan {
@@ -46,7 +47,9 @@ public static String getExecutionPlan(String[] args) {
4647
ParamsInfo paramsInfo = ExecuteProcessHelper.parseParams(args);
4748
paramsInfo.setGetPlan(true);
4849
ClassLoader envClassLoader = StreamExecutionEnvironment.class.getClassLoader();
49-
ClassLoader plannerClassLoader = URLClassLoader.newInstance(new URL[0], envClassLoader);
50+
ClassLoader plannerClassLoader = URLClassLoader.newInstance(
51+
paramsInfo.getJarUrlList().toArray(new URL[0]),
52+
envClassLoader);
5053
Thread.currentThread().setContextClassLoader(plannerClassLoader);
5154
StreamExecutionEnvironment env = ExecuteProcessHelper.getStreamExecution(paramsInfo);
5255
String executionPlan = env.getExecutionPlan();

core/src/main/java/com/dtstack/flink/sql/classloader/ClassLoaderManager.java

+14-2
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,12 @@
2020

2121
import com.dtstack.flink.sql.util.PluginUtil;
2222
import com.dtstack.flink.sql.util.ReflectionUtils;
23+
import org.apache.commons.codec.digest.DigestUtils;
2324
import org.apache.commons.lang3.StringUtils;
2425
import org.slf4j.Logger;
2526
import org.slf4j.LoggerFactory;
2627

28+
import java.io.FileInputStream;
2729
import java.lang.reflect.InvocationTargetException;
2830
import java.lang.reflect.Method;
2931
import java.net.URL;
@@ -71,9 +73,19 @@ private static DtClassLoader retrieveClassLoad(String pluginJarPath) {
7173
});
7274
}
7375

74-
private static DtClassLoader retrieveClassLoad(List<URL> jarUrls) {
76+
public static DtClassLoader retrieveClassLoad(List<URL> jarUrls) {
7577
jarUrls.sort(Comparator.comparing(URL::toString));
76-
String jarUrlkey = StringUtils.join(jarUrls, "_");
78+
79+
List<String> jarMd5s = new ArrayList<>(jarUrls.size());
80+
for (URL jarUrl : jarUrls) {
81+
try (FileInputStream inputStream = new FileInputStream(jarUrl.getPath())){
82+
String jarMd5 = DigestUtils.md5Hex(inputStream);
83+
jarMd5s.add(jarMd5);
84+
} catch (Exception e) {
85+
throw new RuntimeException("Exceptions appears when read file:" + e);
86+
}
87+
}
88+
String jarUrlkey = StringUtils.join(jarMd5s, "_");
7789
return pluginClassLoader.computeIfAbsent(jarUrlkey, k -> {
7890
try {
7991
URL[] urls = jarUrls.toArray(new URL[jarUrls.size()]);

core/src/main/java/com/dtstack/flink/sql/exec/ExecuteProcessHelper.java

+11-14
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,7 @@
5555
import org.apache.calcite.sql.SqlInsert;
5656
import org.apache.calcite.sql.SqlNode;
5757
import org.apache.commons.io.Charsets;
58+
import org.apache.commons.lang3.SerializationUtils;
5859
import org.apache.commons.lang3.StringUtils;
5960
import org.apache.flink.api.common.typeinfo.TypeInformation;
6061
import org.apache.flink.api.java.typeutils.RowTypeInfo;
@@ -79,6 +80,7 @@
7980
import java.time.ZoneId;
8081
import java.util.ArrayList;
8182
import java.util.Arrays;
83+
import java.util.HashMap;
8284
import java.util.List;
8385
import java.util.Map;
8486
import java.util.Objects;
@@ -244,7 +246,11 @@ private static void sqlTranslation(String localSqlPluginPath,
244246
scope++;
245247
}
246248

249+
final Map<String, AbstractSideTableInfo> tmpTableMap = new HashMap<>();
247250
for (InsertSqlParser.SqlParseResult result : sqlTree.getExecSqlList()) {
251+
// prevent current sql use last sql's sideTableInfo
252+
sideTableMap.forEach((s, abstractSideTableInfo) -> tmpTableMap.put(s, SerializationUtils.clone(abstractSideTableInfo)));
253+
248254
if (LOG.isInfoEnabled()) {
249255
LOG.info("exe-sql:\n" + result.getExecSql());
250256
}
@@ -257,17 +263,17 @@ private static void sqlTranslation(String localSqlPluginPath,
257263
SqlNode sqlNode = flinkPlanner.getParser().parse(realSql);
258264
String tmpSql = ((SqlInsert) sqlNode).getSource().toString();
259265
tmp.setExecSql(tmpSql);
260-
sideSqlExec.exec(tmp.getExecSql(), sideTableMap, tableEnv, registerTableCache, tmp, scope + "");
266+
sideSqlExec.exec(tmp.getExecSql(), tmpTableMap, tableEnv, registerTableCache, tmp, scope + "");
261267
} else {
262268
for (String sourceTable : result.getSourceTableList()) {
263-
if (sideTableMap.containsKey(sourceTable)) {
269+
if (tmpTableMap.containsKey(sourceTable)) {
264270
isSide = true;
265271
break;
266272
}
267273
}
268274
if (isSide) {
269275
//sql-dimensional table contains the dimension table of execution
270-
sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, null, String.valueOf(scope));
276+
sideSqlExec.exec(result.getExecSql(), tmpTableMap, tableEnv, registerTableCache, null, String.valueOf(scope));
271277
} else {
272278
LOG.info("----------exec sql without dimension join-----------");
273279
LOG.info("----------real sql exec is--------------------------\n{}", result.getExecSql());
@@ -280,26 +286,17 @@ private static void sqlTranslation(String localSqlPluginPath,
280286

281287
scope++;
282288
}
289+
tmpTableMap.clear();
283290
}
284291
}
285292

286293
public static void registerUserDefinedFunction(SqlTree sqlTree, List<URL> jarUrlList, TableEnvironment tableEnv, boolean getPlan)
287294
throws IllegalAccessException, InvocationTargetException {
288295
// udf和tableEnv须由同一个类加载器加载
289-
ClassLoader levelClassLoader = tableEnv.getClass().getClassLoader();
290296
ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
291-
URLClassLoader classLoader = null;
297+
URLClassLoader classLoader = ClassLoaderManager.loadExtraJar(jarUrlList, (URLClassLoader) currentClassLoader);
292298
List<CreateFuncParser.SqlParserResult> funcList = sqlTree.getFunctionList();
293299
for (CreateFuncParser.SqlParserResult funcInfo : funcList) {
294-
// 构建plan的情况下,udf和tableEnv不需要是同一个类加载器
295-
if (getPlan) {
296-
classLoader = ClassLoaderManager.loadExtraJar(jarUrlList, (URLClassLoader) currentClassLoader);
297-
}
298-
299-
//classloader
300-
if (classLoader == null) {
301-
classLoader = ClassLoaderManager.loadExtraJar(jarUrlList, (URLClassLoader) levelClassLoader);
302-
}
303300
FunctionManager.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, classLoader);
304301
}
305302
}

core/src/main/java/com/dtstack/flink/sql/util/TableUtils.java

+16-2
Original file line numberDiff line numberDiff line change
@@ -539,11 +539,25 @@ private static void replaceConditionNode(SqlNode selectNode, String oldTbName, S
539539

540540
String tableName = sqlIdentifier.names.asList().get(0);
541541
String tableField = sqlIdentifier.names.asList().get(1);
542-
String fieldKey = tableName + "_" + tableField;
542+
String fieldKey = tableName + "." + tableField;
543543

544544
if(tableName.equalsIgnoreCase(oldTbName)){
545545

546-
String newFieldName = fieldReplaceRef.get(fieldKey) == null ? tableField : fieldReplaceRef.get(fieldKey);
546+
/*
547+
* ****Before replace:*****
548+
* fieldKey: b.department
549+
* fieldReplaceRef : b.department -> a_b_0.department0
550+
* oldFieldRef: a_b_0.department0
551+
* oldTbName: b
552+
* oldFieldName: department
553+
* ****After replace:*****
554+
* newTbName: a_b_0
555+
* newFieldName: department0
556+
*/
557+
String oldFieldRef = fieldReplaceRef.get(fieldKey);
558+
String newFieldName = (oldFieldRef != null && !StringUtils.substringAfter(oldFieldRef, ".").isEmpty()) ?
559+
StringUtils.substringAfter(oldFieldRef, ".") : tableField;
560+
547561
SqlIdentifier newField = ((SqlIdentifier)selectNode).setName(0, newTbName);
548562
newField = newField.setName(1, newFieldName);
549563
((SqlIdentifier)selectNode).assignNamesFrom(newField);

0 commit comments

Comments
 (0)