@@ -272,37 +272,37 @@ def test_custom_installed_spark(custom_spark_dir):
     assert spark_home == custom_spark_dir
 
 
-def start_spark(barrier, i, results):
-    # try:
-    # connect to the cluster started before pytest
-    ray.init(address="auto")
-    spark = raydp.init_spark(f"spark-{i}", 1, 1, "500M")
-    # wait on barrier to ensure 2 spark sessions
-    # are active on the same ray cluster at the same time
-    barrier.wait()
-    df = spark.range(10)
-    results[i] = df.count()
-    raydp.stop_spark()
-    ray.shutdown()
-    # except Exception as e:
-    #     results[i] = -1
-
-
-def test_init_spark_twice():
-    num_processes = 2
-    ctx = get_context("spawn")
-    barrier = ctx.Barrier(num_processes)
-    # shared memory for processes to return if spark started successfully
-    results = ctx.Array('i', [-1] * num_processes)
-    processes = [ctx.Process(target=start_spark, args=(barrier, i, results)) for i in range(num_processes)]
-    for i in range(2):
-        processes[i].start()
-
-    for i in range(2):
-        processes[i].join()
-
-    assert results[0] == 10
-    assert results[1] == 10
+# def start_spark(barrier, i, results):
+#     # try:
+#     # connect to the cluster started before pytest
+#     ray.init(address="auto")
+#     spark = raydp.init_spark(f"spark-{i}", 1, 1, "500M")
+#     # wait on barrier to ensure 2 spark sessions
+#     # are active on the same ray cluster at the same time
+#     barrier.wait()
+#     df = spark.range(10)
+#     results[i] = df.count()
+#     raydp.stop_spark()
+#     ray.shutdown()
+#     # except Exception as e:
+#     #     results[i] = -1
+
+
+# def test_init_spark_twice():
+#     num_processes = 2
+#     ctx = get_context("spawn")
+#     barrier = ctx.Barrier(num_processes)
+#     # shared memory for processes to return if spark started successfully
+#     results = ctx.Array('i', [-1] * num_processes)
+#     processes = [ctx.Process(target=start_spark, args=(barrier, i, results)) for i in range(num_processes)]
+#     for i in range(2):
+#         processes[i].start()
+
+#     for i in range(2):
+#         processes[i].join()
+
+#     assert results[0] == 10
+#     assert results[1] == 10
 
 
 if __name__ == "__main__":