Skip to content

Commit c8a6821

Browse files
committed
Fix build
1 parent adb0670 commit c8a6821

File tree

1 file changed

+2
-5
lines changed

1 file changed

+2
-5
lines changed

iceberg/src/main/scala/com/nvidia/spark/rapids/iceberg/GpuIcebergPartitioner.scala

Lines changed: 2 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@ import java.lang.Math.toIntExact

 import scala.collection.JavaConverters._

-import ai.rapids.cudf.{ColumnVector => CudfColumnVector, Table}
+import ai.rapids.cudf.{ColumnVector => CudfColumnVector, OrderByArg, Scalar, Table}
 import com.nvidia.spark.rapids.{GpuBoundReference, GpuColumnVector, GpuExpression, GpuLiteral, RapidsHostColumnVector, SpillableColumnarBatch, SpillPriorities}
 import com.nvidia.spark.rapids.Arm.{closeOnExcept, withResource}
 import com.nvidia.spark.rapids.RapidsPluginImplicits.AutoCloseableProducingSeq
@@ -54,12 +54,9 @@ class GpuIcebergPartitioner(val spec: PartitionSpec,
   private val partitionExprs: Seq[GpuExpression] = spec.fields().asScala.map(getPartitionExpr).toSeq

   private val keyColNum: Int = spec.fields().size()
-  private val inputColNum: Int = dataSparkType.fields.length

   // key column indices in the table: [key columns, input columns]
   private val keyColIndices: Array[Int] = (0 until keyColNum).toArray
-  // input column indices in the table: [key columns, input columns]
-  private val inputColumnIndices: Array[Int] = (keyColNum until (keyColNum + inputColNum)).toArray

   /**
    * Make a new table: [key columns, input columns]
@@ -109,7 +106,7 @@ class GpuIcebergPartitioner(val spec: PartitionSpec,
     // note: the result does not contain the key columns
     val splitRet = withResource(keysAndInputTable) { _ =>
      keysAndInputTable.groupBy(keyColIndices: _*)
-        .contiguousSplitGroupsAndGenUniqKeys(inputColumnIndices)
+        .contiguousSplitGroupsAndGenUniqKeys()
     }

     // generate results

0 commit comments

Comments
 (0)