Skip to content

Commit 2fdf9d3

Browse files
authored
[EnginePlugin][Spark] Turn off use of secure random by default (#5197)
* Turn off use of secure random by default (closes #5196). * Update the notification mailing list. * Fix the DS meta service build.
1 parent 9036be6 commit 2fdf9d3

File tree

5 files changed

+20
-12
lines changed

5 files changed

+20
-12
lines changed

Diff for: .asf.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,6 @@ github:
6767
required_approving_review_count: 1
6868
notifications:
6969
70-
issues: notifications@linkis.apache.org
71-
pullrequests: notifications@linkis.apache.org
70+
issues: dev@linkis.apache.org
71+
pullrequests: dev@linkis.apache.org
7272

Diff for: linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala

+4-1
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ object SparkConfiguration extends Logging {
156156
CommonVars("wds.linkis.spark.engineconn.fatal.log", "error writing class;OutOfMemoryError")
157157

158158
val PYSPARK_PYTHON3_PATH =
159-
CommonVars[String]("pyspark.python3.path", "/appcom/Install/anaconda3/bin/python")
159+
CommonVars[String]("pyspark.python3.path", "python3")
160160

161161
val ENABLE_REPLACE_PACKAGE_NAME =
162162
CommonVars("wds.linkis.spark.engine.scala.replace_package_header.enable", true)
@@ -182,6 +182,9 @@ object SparkConfiguration extends Logging {
182182

183183
val LINKIS_SPARK_ETL_SUPPORT_HUDI = CommonVars[Boolean]("linkis.spark.etl.support.hudi", false)
184184

185+
val LINKIS_PYSPARK_USE_SECURE_RANDOM =
186+
CommonVars[Boolean]("linkis.pyspark.use.secure.random", false).getValue
187+
185188
val SCALA_PARSE_APPEND_CODE =
186189
CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue
187190

Diff for: linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala

+7-2
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@ import org.apache.spark.sql.execution.datasources.csv.UDF
4646

4747
import java.io._
4848
import java.net.InetAddress
49+
import java.security.SecureRandom
4950
import java.util
5051

5152
import scala.collection.JavaConverters._
@@ -76,7 +77,12 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
7677
private val lineOutputStream = new RsOutputStream
7778
val sqlContext = sparkEngineSession.sqlContext
7879
val SUCCESS = "success"
79-
private lazy val py4jToken: String = SecureRandomStringUtils.randomAlphanumeric(256)
80+
81+
private lazy val py4jToken: String = if (SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM) {
82+
SecureRandomStringUtils.randomAlphanumeric(256)
83+
} else {
84+
SecureRandom.getInstance("SHA1PRNG").nextInt(100000).toString
85+
}
8086

8187
private lazy val gwBuilder: GatewayServerBuilder = {
8288
val builder = new GatewayServerBuilder()
@@ -152,7 +158,6 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
152158
)
153159
val userDefinePythonVersion = engineCreationContext.getOptions
154160
.getOrDefault("spark.python.version", "python")
155-
.toString
156161
.toLowerCase()
157162
val sparkPythonVersion =
158163
if (

Diff for: linkis-public-enhancements/distribution.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -290,7 +290,7 @@
290290
<fileSets>
291291
<fileSet>
292292
<directory>
293-
./linkis-datasource/linkis-datasource-manager/server/target/out/lib/service
293+
./linkis-datasource/linkis-datasource-manager/server/target/out/service
294294
</directory>
295295
<outputDirectory>
296296
lib/metadataquery-service

Diff for: linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/assembly/distribution.xml

+6-6
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@
187187
<fileSet>
188188
<directory>${basedir}/../service/elasticsearch/target/out/lib</directory>
189189
<fileMode>0755</fileMode>
190-
<outputDirectory>lib/service/elasticsearch</outputDirectory>
190+
<outputDirectory>service/elasticsearch</outputDirectory>
191191
<includes>
192192
<include>*.jar</include>
193193
</includes>
@@ -198,7 +198,7 @@
198198
<fileSet>
199199
<directory>${basedir}/../service/hive/target/out/lib</directory>
200200
<fileMode>0755</fileMode>
201-
<outputDirectory>lib/service/hive</outputDirectory>
201+
<outputDirectory>service/hive</outputDirectory>
202202
<includes>
203203
<include>*.jar</include>
204204
</includes>
@@ -209,7 +209,7 @@
209209
<fileSet>
210210
<directory>${basedir}/../service/kafka/target/out/lib</directory>
211211
<fileMode>0755</fileMode>
212-
<outputDirectory>lib/service/kafka</outputDirectory>
212+
<outputDirectory>service/kafka</outputDirectory>
213213
<includes>
214214
<include>*.jar</include>
215215
</includes>
@@ -220,7 +220,7 @@
220220
<fileSet>
221221
<directory>${basedir}/../service/jdbc/target/out/lib</directory>
222222
<fileMode>0755</fileMode>
223-
<outputDirectory>lib/service/jdbc</outputDirectory>
223+
<outputDirectory>service/jdbc</outputDirectory>
224224
<includes>
225225
<include>*.jar</include>
226226
</includes>
@@ -231,7 +231,7 @@
231231
<fileSet>
232232
<directory>${basedir}/../service/hdfs/target/out/lib</directory>
233233
<fileMode>0755</fileMode>
234-
<outputDirectory>lib/service/hdfs</outputDirectory>
234+
<outputDirectory>service/hdfs</outputDirectory>
235235
<includes>
236236
<include>*.jar</include>
237237
</includes>
@@ -242,7 +242,7 @@
242242
<fileSet>
243243
<directory>${basedir}/../service/mongodb/target/out/lib</directory>
244244
<fileMode>0755</fileMode>
245-
<outputDirectory>lib/service/mongodb</outputDirectory>
245+
<outputDirectory>service/mongodb</outputDirectory>
246246
<includes>
247247
<include>*.jar</include>
248248
</includes>

0 commit comments

Comments (0)