From 738c8e38fc23c1634667443864b80f085f2737ac Mon Sep 17 00:00:00 2001
From: "Hongbin Ma (Mahone)"
Date: Tue, 3 Dec 2024 09:07:10 +0800
Subject: [PATCH] exclude previous operator's time out of firstBatchHeuristic
 (#11794)

Signed-off-by: Hongbin Ma (Mahone)
---
 .../main/scala/com/nvidia/spark/rapids/GpuAggregateExec.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuAggregateExec.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuAggregateExec.scala
index 4ba20547e77..d5bbe15209d 100644
--- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuAggregateExec.scala
+++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuAggregateExec.scala
@@ -2091,9 +2091,9 @@ class DynamicGpuPartialAggregateIterator(
       helper: AggHelper): (Iterator[ColumnarBatch], Boolean) = {
     // we need to decide if we are going to sort the data or not, so the very
     // first thing we need to do is get a batch and make a choice.
+    val cb = cbIter.next()
     withResource(new NvtxWithMetrics("dynamic sort heuristic", NvtxColor.BLUE,
       metrics.opTime, metrics.heuristicTime)) { _ =>
-      val cb = cbIter.next()
       lazy val estimatedGrowthAfterAgg: Double = closeOnExcept(cb) { cb =>
         val numRows = cb.numRows()
         val cardinality = estimateCardinality(cb)