Updating dataproc container cost to be multiplied by number of cores #648

Merged · 9 commits · Nov 3, 2023
user_tools/src/spark_rapids_pytools/cloud_api/dataproc.py — 3 additions, 1 deletion:

```diff
@@ -545,5 +545,7 @@ def _calculate_group_cost(self, cluster_inst: ClusterGetAccessor, node_type: SparkNodeType):
     def _get_cost_per_cluster(self, cluster: ClusterGetAccessor):
         master_cost = self._calculate_group_cost(cluster, SparkNodeType.MASTER)
         workers_cost = self._calculate_group_cost(cluster, SparkNodeType.WORKER)
-        dataproc_cost = self.price_provider.get_container_cost()
+        master_cores = cluster.get_nodes_cnt(SparkNodeType.MASTER) * cluster.get_node_core_count(SparkNodeType.MASTER)
+        worker_cores = cluster.get_nodes_cnt(SparkNodeType.WORKER) * cluster.get_node_core_count(SparkNodeType.WORKER)
+        dataproc_cost = self.price_provider.get_container_cost() * (master_cores + worker_cores)
         return master_cost + workers_cost + dataproc_cost
```
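For intuition, here is a minimal worked example of the new cluster-level formula. The cluster shape, the rate, and the per-core reading of `get_container_cost()` are illustrative assumptions, not values from the PR:

```python
# Hypothetical cluster: 1 master with 4 cores, 2 workers with 8 cores each.
# Assumes the container rate returned by get_container_cost() is per core.
master_nodes, master_cores_per_node = 1, 4
worker_nodes, worker_cores_per_node = 2, 8
container_rate = 0.01  # assumed rate per core-hour

master_cores = master_nodes * master_cores_per_node  # 4
worker_cores = worker_nodes * worker_cores_per_node  # 16
dataproc_cost = container_rate * (master_cores + worker_cores)
print(round(dataproc_cost, 2))  # 0.2, instead of the flat 0.01 before this change
```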
user_tools/src/spark_rapids_pytools/cloud_api/onprem.py — 4 additions, 2 deletions:

```diff
@@ -306,12 +306,14 @@ def __calculate_dataproc_group_cost(self, cluster_inst: ClusterGetAccessor, node_type: SparkNodeType):
         if gpu_per_machine > 0:
             gpu_unit_price = self.price_provider.get_gpu_price(gpu_type)
             gpu_cost = gpu_unit_price * gpu_per_machine
-        return nodes_cnt * (cores_cost + memory_cost + gpu_cost)
+        return nodes_cnt * (cores_cost + memory_cost + dataproc_cost + gpu_cost)
```
Collaborator Author commented on the added return line:

> Need to remove dataproc_cost reference here


The same hunk continues in _get_cost_per_cluster:

```diff
     def _get_cost_per_cluster(self, cluster: ClusterGetAccessor):
         if self.price_provider.name.casefold() == 'dataproc':
             master_cost = self.__calculate_dataproc_group_cost(cluster, SparkNodeType.MASTER)
             workers_cost = self.__calculate_dataproc_group_cost(cluster, SparkNodeType.WORKER)
-            dataproc_cost = self.price_provider.get_container_cost()
+            master_cores = cluster.get_nodes_cnt(SparkNodeType.MASTER) * cluster.get_node_core_count(SparkNodeType.MASTER)
+            worker_cores = cluster.get_nodes_cnt(SparkNodeType.WORKER) * cluster.get_node_core_count(SparkNodeType.WORKER)
+            dataproc_cost = self.price_provider.get_container_cost() * (master_cores + worker_cores)
             total_cost = master_cost + workers_cost + dataproc_cost
         return total_cost
```
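To see why the author flags the per-node dataproc_cost reference above: once this PR multiplies the container cost by total cores at the cluster level, keeping the term inside __calculate_dataproc_group_cost would charge it twice. A minimal sketch with made-up numbers (the rates and the per-node container charge are assumptions for illustration, not values from the codebase):

```python
# Hypothetical 2-node group, 8 cores per node; all rates are made up.
nodes_cnt, cores_per_node = 2, 8
cores_cost, memory_cost, gpu_cost = 1.0, 0.5, 0.0
container_rate = 0.01                      # assumed per-core container rate
per_node_container = container_rate * cores_per_node

# As the diff stands: container cost inside the per-node sum...
group_cost = nodes_cnt * (cores_cost + memory_cost + per_node_container + gpu_cost)
# ...plus the new cluster-level term introduced by this PR:
cluster_container = container_rate * (nodes_cnt * cores_per_node)
double_counted = group_cost + cluster_container

# With the author's suggested fix (drop the per-node reference):
group_cost_fixed = nodes_cnt * (cores_cost + memory_cost + gpu_cost)
fixed = group_cost_fixed + cluster_container

print(round(double_counted, 2), round(fixed, 2))  # 3.32 3.16
```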