Commit 0c5dfc9

Remove emitting metrics to clusterman since those tables don't exist anymore. (#150)
jsleight authored Sep 25, 2024
1 parent c0e312f commit 0c5dfc9
Showing 2 changed files with 0 additions and 50 deletions.
33 changes: 0 additions & 33 deletions service_configuration_lib/spark_config.py
@@ -1317,35 +1317,6 @@ def generate_clusterman_metrics_entries(
}


def _emit_resource_requirements(
clusterman_metrics,
resources: Mapping[str, int],
app_name: str,
spark_web_url: str,
cluster: str,
pool: str,
) -> None:

with open(CLUSTERMAN_YAML_FILE_PATH, 'r') as clusterman_yaml_file:
clusterman_yaml = yaml.safe_load(clusterman_yaml_file.read())
aws_region = clusterman_yaml['clusters'][cluster]['aws_region']

client = clusterman_metrics.ClustermanMetricsBotoClient(
region_name=aws_region, app_identifier=pool,
)
metrics_entries = generate_clusterman_metrics_entries(
clusterman_metrics,
resources,
app_name,
spark_web_url,
)
with client.get_writer(
clusterman_metrics.APP_METRICS, aggregate_meteorite_dims=True,
) as writer:
for metric_key, required_quantity in metrics_entries.items():
writer.send((metric_key, int(time.time()), required_quantity))


def get_spark_hourly_cost(
clusterman_metrics,
resources: Mapping[str, int],
@@ -1387,10 +1358,6 @@ def send_and_calculate_resources_cost(
is the requested resources.
"""
cluster = spark_conf['spark.executorEnv.PAASTA_CLUSTER']
app_name = spark_conf['spark.app.name']
resources = get_resources_requested(spark_conf)
hourly_cost = get_spark_hourly_cost(clusterman_metrics, resources, cluster, pool)
_emit_resource_requirements(
clusterman_metrics, resources, app_name, spark_web_url, cluster, pool,
)
return hourly_cost, resources
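
For reference, a minimal sketch of what send_and_calculate_resources_cost is left doing after this commit, reconstructed from the unchanged lines in the hunk above. The parameter names and type hints are assumptions inferred from the existing call sites; get_resources_requested and get_spark_hourly_cost are the module helpers that remain in the file.

from typing import Mapping, Tuple

def send_and_calculate_resources_cost(
    clusterman_metrics,
    spark_conf: Mapping[str, str],
    spark_web_url: str,  # presumably kept for signature compatibility; no longer used for metrics
    pool: str,
) -> Tuple[float, Mapping[str, int]]:
    # Only the cost estimate survives: look up the PaaSTA cluster, compute the
    # requested resources, and estimate the hourly cost. Nothing is written to
    # clusterman anymore.
    cluster = spark_conf['spark.executorEnv.PAASTA_CLUSTER']
    resources = get_resources_requested(spark_conf)
    hourly_cost = get_spark_hourly_cost(clusterman_metrics, resources, cluster, pool)
    return hourly_cost, resources
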
17 changes: 0 additions & 17 deletions tests/spark_config_test.py
@@ -1703,7 +1703,6 @@ def test_send_and_calculate_resources_cost(
mock_get_resources_requested,
mock_time,
):
mock_clusterman_metrics.generate_key_with_dimensions.side_effect = lambda x, _: x
app_name = 'test-app'
spark_opts = {
'spark.executorEnv.PAASTA_CLUSTER': 'test-cluster',
@@ -1714,22 +1713,6 @@
mock_clusterman_metrics, spark_opts, web_url, 'test-pool',
)

expected_dimension = {'framework_name': app_name, 'webui_url': web_url}

mock_clusterman_metrics.generate_key_with_dimensions.assert_has_calls([
mock.call('requested_cpus', expected_dimension),
mock.call('requested_mem', expected_dimension),
])

mock_writer = (
mock_clusterman_metrics.ClustermanMetricsBotoClient.return_value
.get_writer.return_value.__enter__.return_value
)
mock_writer.send.assert_has_calls([
mock.call(('requested_cpus', int(mock_time), 10)),
mock.call(('requested_mem', int(mock_time), 2048)),
])

mock_clusterman_metrics.util.costs.estimate_cost_per_hour.assert_called_once_with(
cluster='test-cluster', pool='test-pool', cpus=10, mem=2048,
)
