fix test, init ray cluster properly

pang-wu committed Dec 22, 2024
1 parent 0fdd0ee commit 903bce0
Showing 2 changed files with 6 additions and 6 deletions.
python/raydp/tests/conftest.py (2 changes: 2 additions & 0 deletions)
@@ -66,6 +66,7 @@ def spark_on_ray_small(request):
     })
 
     def stop_all():
+        spark.stop()
         raydp.stop_spark()
         time.sleep(5)
         ray.shutdown()
@@ -88,6 +89,7 @@ def spark_on_ray_2_executors(request):
     })
 
     def stop_all():
+        spark.stop()
         raydp.stop_spark()
         time.sleep(5)
         ray.shutdown()
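The conftest.py change is limited to teardown ordering: each fixture's finalizer now stops the SparkSession itself before tearing down RayDP and Ray. Below is a minimal sketch of the fixture pattern these hunks touch; the fixture setup, the init_spark arguments, and the addfinalizer call are assumed for illustration, and only the stop_all ordering comes from the diff.

import time

import pytest
import ray
import raydp


@pytest.fixture(scope="function")
def spark_on_ray_small(request):
    # Assumed setup: a small local Ray instance and a tiny Spark-on-Ray session.
    ray.init(num_cpus=2, include_dashboard=False)
    spark = raydp.init_spark(app_name="test", num_executors=1,
                             executor_cores=1, executor_memory="500M")

    def stop_all():
        spark.stop()        # added by this commit: close the SparkSession first
        raydp.stop_spark()  # then release RayDP's Spark-on-Ray resources
        time.sleep(5)
        ray.shutdown()

    request.addfinalizer(stop_all)
    return spark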
python/raydp/tests/test_spark_cluster.py (10 changes: 4 additions & 6 deletions)
@@ -41,12 +41,11 @@ def test_spark(spark_on_ray_small):
 
 def test_legacy_spark_on_fractional_cpu():
     cluster = Cluster(
         initialize_head=True,
-        connect=True,
         head_node_args={
             "num_cpus": 2
         })
-
+    ray.init(address=cluster.address, include_dashboard=False)
     spark = raydp.init_spark(app_name="test_cpu_fraction",
                              num_executors=1, executor_cores=3, executor_memory="500M",
                              configs={"spark.ray.actor.resource.cpu": "0.1"})
@@ -62,12 +61,11 @@ def test_legacy_spark_on_fractional_cpu():
 
 def test_spark_executor_on_fractional_cpu():
     cluster = Cluster(
         initialize_head=True,
-        connect=True,
         head_node_args={
             "num_cpus": 2
         })
-
+    ray.init(address=cluster.address, include_dashboard=False)
     spark = raydp.init_spark(app_name="test_cpu_fraction",
                              num_executors=1, executor_cores=3, executor_memory="500M",
                              configs={"spark.ray.raydp_spark_executor.actor.resource.cpu": "0.1"})
@@ -83,19 +81,19 @@ def test_spark_executor_on_fractional_cpu():
 
 def test_spark_executor_node_affinity():
     cluster = Cluster(
         initialize_head=True,
-        connect=True,
         head_node_args={
             "num_cpus": 1,
         })
     cluster.add_node(num_cpus=2, resources={"spark_executor": 10})
-
+    ray.init(address=cluster.address, include_dashboard=False)
     spark = raydp.init_spark(app_name="test_executor_node_affinity",
                              num_executors=1, executor_cores=2, executor_memory="500M",
                              configs={"spark.ray.raydp_spark_executor.actor.resource.spark_executor": "1"})
     result = spark.range(0, 10).count()
     assert result == 10
 
+    spark.stop()
     raydp.stop_spark()
     time.sleep(5)
     ray.shutdown()
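In test_spark_cluster.py each test now builds the multi-node Cluster without connect=True and attaches the driver explicitly with ray.init(address=cluster.address, ...). A condensed, standalone sketch of that pattern, based on the node-affinity hunk above; the Cluster import is an assumption, the rest mirrors the diff.

import time

import ray
from ray.cluster_utils import Cluster

import raydp


def test_spark_executor_node_affinity():
    # Head node plus one worker that carries the custom "spark_executor"
    # resource used to pin Spark executors to that node.
    cluster = Cluster(
        initialize_head=True,
        head_node_args={"num_cpus": 1})
    cluster.add_node(num_cpus=2, resources={"spark_executor": 10})

    # Connect the driver explicitly instead of passing connect=True to Cluster;
    # this is the "init ray cluster properly" part of the commit.
    ray.init(address=cluster.address, include_dashboard=False)

    spark = raydp.init_spark(
        app_name="test_executor_node_affinity",
        num_executors=1, executor_cores=2, executor_memory="500M",
        configs={"spark.ray.raydp_spark_executor.actor.resource.spark_executor": "1"})

    assert spark.range(0, 10).count() == 10

    spark.stop()
    raydp.stop_spark()
    time.sleep(5)
    ray.shutdown()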
