@@ -37,12 +37,12 @@ INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_
37
37
-- Register typename mapping 'yarn3-hdfs3-spark320' -> -109 (type 6, stored as LONG, no depend_name).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('typename_mapping', 'yarn3-hdfs3-spark320', '-109', null, 6, 0, 'LONG', '', 0, now(), now(), 0);
38
38
39
39
-- Clear existing YARN component-model config rows before the fresh inserts that follow.
DELETE FROM dict WHERE dict_code = 'component_model_config' AND depend_name = 'YARN';
40
-- Component model for Apache Hadoop 2.x.
-- Fix: the SPARK "210" entry was missing its closing '}' before ']', making dict_value invalid JSON.
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'Apache Hadoop 2.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
41
41
-- Component model for Apache Hadoop 3.x (yarn3/hdfs3 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'Apache Hadoop 3.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
42
42
-- Component model for CDH 5.x (Hadoop 2-based: yarn2/hdfs2 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'CDH 5.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
43
43
-- Component model for CDH 6.0.x (Hadoop 3-based: yarn3/hdfs3 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'CDH 6.0.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
44
44
-- Component model for CDH 6.1.x (Hadoop 3-based: yarn3/hdfs3 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'CDH 6.1.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
45
-- Component model for CDH 6.2.x.
-- Fix: the SPARK "210" entry was missing its closing '}' before ']', making dict_value invalid JSON.
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'CDH 6.2.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
46
46
-- Component model for CDP 7.x (Hadoop 3-based: yarn3/hdfs3 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'CDP 7.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
47
47
-- Component model for HDP 2.6.x (Hadoop 2-based: yarn2/hdfs2 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'HDP 2.6.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
48
48
-- Component model for HDP 3.0.x (Hadoop 3-based: yarn3/hdfs3 typenames).
INSERT INTO dict (dict_code, dict_name, dict_value, dict_desc, type, sort, data_type, depend_name, is_default, gmt_create, gmt_modified, is_deleted) VALUES ('component_model_config', 'HDP 3.0.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, now(), now(), 0);
@@ -92,4 +92,4 @@ INSERT INTO task_param_template (task_type, task_name, task_version, params, gmt
92
92
# spark.sql.shuffle.partitions=200
93
93
94
94
## 开启spark推测行为,默认false
95
- # spark.speculation=false' , now(), now(), 0 );
95
+ # spark.speculation=false' , now(), now(), 0 );
0 commit comments