Skip to content

Commit

Permalink
fix
Browse files Browse the repository at this point in the history
  • Loading branch information
sperlingxx committed Dec 17, 2024
1 parent 967d345 commit 00580a0
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -256,15 +256,15 @@ trait GpuPartitioning extends Partitioning {
private var memCopyTime: Option[GpuMetric] = None

/**
 * Setup sub-metrics for the performance debugging of GpuPartition. This method is expected to
 * be called at the query planning stage. Therefore, this method is NOT thread safe.
 */
def setupMetrics(metrics: Map[String, GpuMetric]): Unit = {
metrics.get(GpuPartitioning.CopyToHostTime).foreach { metric =>
// Check and set GpuPartitioning.CopyToHostTime
require(memCopyTime.isEmpty,
s"The GpuMetric[${GpuPartitioning.CopyToHostTime}] has already been set")
memCopyTime = Some(metric)
def setupDebugMetrics(metrics: Map[String, GpuMetric]): Unit = {
  // First-wins: once memCopyTime holds a metric, later calls leave it untouched.
  // orElse only consults the incoming map while memCopyTime is still None, so a
  // map without CopyToHostTime simply keeps the current (empty) state.
  memCopyTime = memCopyTime.orElse(metrics.get(GpuPartitioning.CopyToHostTime))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -368,10 +368,10 @@ object GpuShuffleExchangeExecBase {
rdd
}
val partitioner: GpuExpression = getPartitioner(newRdd, outputAttributes, newPartitioning)
// Inject debugging sub-metrics, such as D2HTime, before SliceOnCpu
// The injected metrics will be serialized as the members of GpuPartitioning
partitioner match {
case pt: GpuPartitioning => pt.setupMetrics(additionalMetrics)
case pt: GpuPartitioning => pt.setupDebugMetrics(additionalMetrics)
case _ =>
}
val partitionTime: GpuMetric = metrics(METRIC_SHUFFLE_PARTITION_TIME)
Expand Down

0 comments on commit 00580a0

Please sign in to comment.