diff --git a/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronSQLAppStatusListener.scala b/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronSQLAppStatusListener.scala
index 0da16d4fd..651234ac5 100644
--- a/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronSQLAppStatusListener.scala
+++ b/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronSQLAppStatusListener.scala
@@ -16,6 +16,8 @@
*/
package org.apache.spark.sql.execution.ui
+import scala.annotation.nowarn
+
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
@@ -23,6 +25,7 @@ import org.apache.spark.status.ElementTrackingStore
import org.apache.auron.spark.ui.AuronBuildInfoEvent
+@nowarn("cat=unused") // conf temporarily unused
class AuronSQLAppStatusListener(conf: SparkConf, kvstore: ElementTrackingStore)
extends SparkListener
with Logging {
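
The recurring pattern in this patch is a targeted `@nowarn` so that deliberately-unused members survive the newly-added `-Xfatal-warnings`. A minimal sketch of the idiom (requires Scala 2.12.13+/2.13.2+, where `@nowarn` and `-Wconf` categories exist; `StatusListener` and its parameter are invented names, not the real listener):

```scala
import scala.annotation.nowarn

// Compiled with -Ywarn-unused -Xfatal-warnings, an unused constructor
// parameter is a build failure; @nowarn("cat=unused") silences only
// that diagnostic category, and only on this definition.
@nowarn("cat=unused") // 'conf' is kept for planned future use
class StatusListener(conf: Map[String, String]) {
  def onEvent(name: String): Unit = println(s"event: $name")
}

object StatusListenerDemo extends App {
  new StatusListener(Map.empty).onEvent("start")
}
```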
diff --git a/pom.xml b/pom.xml
index 63ebf1873..3fff72a74 100644
--- a/pom.xml
+++ b/pom.xml
@@ -377,7 +377,10 @@
${javaVersion}
${scalaLongVersion}
+<arg>-deprecation</arg>
+<arg>-feature</arg>
<arg>-Ywarn-unused</arg>
+<arg>-Xfatal-warnings</arg>
@@ -862,11 +865,15 @@
<artifactId>scala-maven-plugin</artifactId>
+<arg>-deprecation</arg>
+<arg>-feature</arg>
<arg>-Ywarn-unused</arg>
<arg>-Ymacro-annotations</arg>
+<arg>-Xfatal-warnings</arg>
+<arg>-Wconf:cat=deprecation:w</arg>
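
The flag combination deserves a note: `-Xfatal-warnings` promotes every warning to an error, while the explicit `-Wconf:cat=deprecation:w` filter takes precedence and demotes deprecation diagnostics back to plain warnings, so deprecated-but-still-needed APIs keep building. A hypothetical demonstration of the resulting behavior (`oldApi` is invented):

```scala
object WconfDemo {
  @deprecated("use newApi instead", "1.0")
  def oldApi(): Int = 1

  // With -deprecation -Xfatal-warnings alone, this call fails the build.
  // Adding -Wconf:cat=deprecation:w reports it as a warning instead,
  // while other categories (e.g. cat=unused) remain fatal.
  def caller(): Int = oldApi()
}
```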
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
index 95aefa65f..f90e4865a 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
@@ -16,6 +16,8 @@
*/
package org.apache.spark.sql.auron
+import scala.annotation.nowarn
+
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.SparkPlan
@@ -71,6 +73,7 @@ object InterceptedValidateSparkPlan extends Logging {
}
}
+ @nowarn("cat=unused") // plan unused
@sparkver("3.0 / 3.1")
def validate(plan: SparkPlan): Unit = {
throw new UnsupportedOperationException("validate is not supported in spark 3.0.3 or 3.1.3")
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
index cb9492c9c..428c2238a 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.auron
import java.io.File
import java.util.UUID
+import scala.annotation.nowarn
import scala.collection.mutable
import org.apache.commons.lang3.reflect.FieldUtils
@@ -965,6 +966,7 @@ class ShimsImpl extends Shims with Logging {
}
}
+ @nowarn("cat=unused") // Some params temporarily unused
@sparkver("3.4 / 3.5")
private def convertPromotePrecision(
e: Expression,
@@ -997,6 +999,7 @@ class ShimsImpl extends Shims with Logging {
}
}
+ @nowarn("cat=unused") // Some params temporarily unused
@sparkver("3.0 / 3.1 / 3.2")
private def convertBloomFilterAgg(agg: AggregateFunction): Option[pb.PhysicalAggExprNode] = None
@@ -1023,6 +1026,7 @@ class ShimsImpl extends Shims with Logging {
}
}
+ @nowarn("cat=unused") // Some params temporarily unused
@sparkver("3.0 / 3.1 / 3.2")
private def convertBloomFilterMightContain(
e: Expression,
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
index e08d8490e..14ebba484 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
@@ -37,6 +37,9 @@ class AuronBlockStoreShuffleReader[K, C](
extends AuronBlockStoreShuffleReaderBase[K, C](handle, context)
with Logging {
+ // Touch mapOutputTracker so the unused checker sees a use under -Xfatal-warnings (used in Spark 3.2+, unused in 3.0/3.1)
+ private val _ = mapOutputTracker
+
override def readBlocks(): Iterator[InputStream] = {
@sparkver("3.2 / 3.3 / 3.4 / 3.5")
def fetchIterator = new ShuffleBlockFetcherIterator(
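
Here a class-level `@nowarn` would be too broad (the parameter is genuinely used on Spark 3.2+), so the patch instead references it once unconditionally. A sketch of the same idiom with an invented class:

```scala
class VersionedReader(tracker: AnyRef) {
  // 'tracker' is referenced only inside version-gated code paths;
  // binding it to _ gives the unused checker one unconditional use
  // without introducing an extra named field.
  private val _ = tracker

  def read(): Iterator[Int] = Iterator.empty
}
```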
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
index 98863bf6a..af92b0e8b 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
@@ -16,6 +16,8 @@
*/
package org.apache.spark.sql.execution.auron.shuffle
+import scala.annotation.nowarn
+
import org.apache.spark.{ShuffleDependency, SparkConf, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.shuffle._
@@ -23,6 +25,7 @@ import org.apache.spark.sql.execution.auron.shuffle.AuronShuffleDependency.isArr
import org.apache.auron.sparkver
+@nowarn("cat=unused") // _conf temporarily unused
abstract class AuronRssShuffleManagerBase(_conf: SparkConf) extends ShuffleManager with Logging {
override def registerShuffle[K, V, C](
shuffleId: Int,
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
index 0236dd260..5f763eeca 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
@@ -16,6 +16,8 @@
*/
package org.apache.spark.sql.execution.joins.auron.plan
+import scala.annotation.nowarn
+
import org.apache.spark.sql.auron.join.JoinBuildSides.JoinBuildSide
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.JoinType
@@ -80,6 +82,7 @@ case object NativeShuffledHashJoinExecProvider {
NativeShuffledHashJoinExec(left, right, leftKeys, rightKeys, joinType, buildSide, isSkewJoin)
}
+ @nowarn("cat=unused") // Some params temporarily unused
@sparkver("3.1")
def provide(
left: SparkPlan,
@@ -127,6 +130,7 @@ case object NativeShuffledHashJoinExecProvider {
NativeShuffledHashJoinExec(left, right, leftKeys, rightKeys, joinType, buildSide)
}
+ @nowarn("cat=unused") // Some params temporarily unused
@sparkver("3.0")
def provide(
left: SparkPlan,
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronFunctionSuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronFunctionSuite.scala
index c7e2f6a9f..ae1d24320 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronFunctionSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronFunctionSuite.scala
@@ -278,8 +278,6 @@ class AuronFunctionSuite extends AuronQueryTest with BaseAuronSQLSuite {
val dateTimeStampMin = format.parse(dateStringMin).getTime
val dateTimeStampMax = format.parse(dateStringMax).getTime
format = new SimpleDateFormat("yyyy-MM-dd")
- val dateString = "2015-01-01"
- val date = format.parse(dateString)
val functions =
s"""
@@ -320,8 +318,6 @@ class AuronFunctionSuite extends AuronQueryTest with BaseAuronSQLSuite {
val dateTimeStampMin = format.parse(dateStringMin).getTime
val dateTimeStampMax = format.parse(dateStringMax).getTime
format = new SimpleDateFormat("yyyy-MM-dd")
- val dateString = "2015-07-01"
- val date = format.parse(dateString)
val functions =
s"""
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronQuerySuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronQuerySuite.scala
index 349b489aa..0fae17d49 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronQuerySuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/auron/AuronQuerySuite.scala
@@ -210,7 +210,7 @@ class AuronQuerySuite extends AuronQueryTest with BaseAuronSQLSuite with AuronSQ
withTable("t") {
sql(s"CREATE EXTERNAL TABLE t(c3 INT, c2 INT) USING ORC LOCATION '$path'")
- val expected = if (forcePositionalEvolution) {
+ val _ = if (forcePositionalEvolution) {
correctAnswer
} else {
Seq(Row(null, 2), Row(null, 4), Row(null, 6), Row(null, null))
@@ -247,7 +247,7 @@ class AuronQuerySuite extends AuronQueryTest with BaseAuronSQLSuite with AuronSQ
|LOCATION '$path'
|""".stripMargin)
sql("MSCK REPAIR TABLE t")
- if (forcePositionalEvolution) {
+ val _ = if (forcePositionalEvolution) {
correctAnswer
} else {
Seq(Row(null, 2, 1), Row(null, 4, 2), Row(null, 6, 3), Row(null, null, 4))
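
Both test hunks bind a previously-named (first hunk) or bare (second hunk) expression to `_`. A compact illustration of why that satisfies `-Ywarn-unused` (names invented):

```scala
object UnusedLocalDemo extends App {
  val flag = true

  // 'val expected = ...' would fail under -Ywarn-unused -Xfatal-warnings
  // because the name is never read; binding to _ keeps the expression
  // (and any exception it might raise) without the dead name.
  val _ = if (flag) Seq(1, 2) else Seq(3, 4)
  println("checked")
}
```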
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronCallNativeWrapper.scala b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronCallNativeWrapper.scala
index bf0918c40..b4028c1e2 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronCallNativeWrapper.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronCallNativeWrapper.scala
@@ -22,6 +22,7 @@ import java.nio.file.Files
import java.nio.file.StandardCopyOption
import java.util.concurrent.atomic.AtomicReference
+import scala.annotation.nowarn
import scala.collection.mutable.ArrayBuffer
import org.apache.arrow.c.ArrowArray
@@ -53,6 +54,7 @@ import org.apache.auron.protobuf.TaskDefinition
* This class has been deprecated and migrated to {@link
* org.apache.auron.jni.AuronCallNativeWrapper}. Will be removed in the future.
*/
+@nowarn("cat=deprecation") // JniBridge is temporarily used (deprecated)
@Deprecated
case class AuronCallNativeWrapper(
nativePlan: PhysicalPlanNode,
@@ -193,6 +195,7 @@ case class AuronCallNativeWrapper(
}
}
+@nowarn("cat=deprecation") // JniBridge is temporarily used (deprecated)
object AuronCallNativeWrapper extends Logging {
def initNative(): Unit = {
lazyInitNative
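
Unlike the `cat=unused` cases above, this file suppresses deprecation warnings definition-wide while the migration to org.apache.auron.jni.AuronCallNativeWrapper is in flight. A self-contained sketch of the scoping (`LegacyApi` is invented):

```scala
import scala.annotation.nowarn

object LegacyApi {
  @deprecated("migrated elsewhere", "1.0")
  def call(): Int = 42
}

// Scoping @nowarn("cat=deprecation") to the one definition that still
// needs the old API keeps -deprecation -Xfatal-warnings enforced for
// the rest of the codebase.
@nowarn("cat=deprecation")
object LegacyCaller {
  def run(): Int = LegacyApi.call()
}
```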
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
index 4f124bd8f..18ec4ba71 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.auron
import java.util.ServiceLoader
-import scala.annotation.tailrec
+import scala.annotation.{nowarn, tailrec}
import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -418,12 +418,14 @@ object AuronConverters extends Logging {
@sparkver(" 3.2 / 3.3 / 3.4 / 3.5")
def getIsSkewJoinFromSHJ(exec: ShuffledHashJoinExec): Boolean = exec.isSkewJoin
+ @nowarn("cat=unused")
@sparkver("3.0 / 3.1")
def getIsSkewJoinFromSHJ(exec: ShuffledHashJoinExec): Boolean = false
@sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5")
def getShuffleOrigin(exec: ShuffleExchangeExec): Option[Any] = Some(exec.shuffleOrigin)
+ @nowarn("cat=unused")
@sparkver("3.0")
def getShuffleOrigin(exec: ShuffleExchangeExec): Option[Any] = None
@@ -649,6 +651,7 @@ object AuronConverters extends Logging {
@sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5")
def isNullAwareAntiJoin(exec: BroadcastHashJoinExec): Boolean = exec.isNullAwareAntiJoin
+ @nowarn("cat=unused")
@sparkver("3.0")
def isNullAwareAntiJoin(exec: BroadcastHashJoinExec): Boolean = false
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeConverters.scala b/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeConverters.scala
index 13a627f24..0ab465cc2 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeConverters.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeConverters.scala
@@ -1001,12 +1001,12 @@ object NativeConverters extends Logging {
val children = e.children.map(Cast(_, e.dataType))
buildScalarFunction(pb.ScalarFunction.Coalesce, children, e.dataType)
- case e @ StringLPad(str, len, pad) =>
+ case StringLPad(str, len, pad) =>
buildScalarFunction(
pb.ScalarFunction.Lpad,
Seq(str, castIfNecessary(len, LongType), pad),
StringType)
- case e @ StringRPad(str, len, pad) =>
+ case StringRPad(str, len, pad) =>
buildScalarFunction(
pb.ScalarFunction.Rpad,
Seq(str, castIfNecessary(len, LongType), pad),
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeHelper.scala b/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeHelper.scala
index 3ae7669ee..e16656471 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeHelper.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/auron/NativeHelper.scala
@@ -100,7 +100,7 @@ object NativeHelper extends Logging {
if (nativePlan == null) {
return Iterator.empty
}
- var auronCallNativeWrapper = new org.apache.auron.jni.AuronCallNativeWrapper(
+ val auronCallNativeWrapper = new org.apache.auron.jni.AuronCallNativeWrapper(
ROOT_ALLOCATOR,
nativePlan,
metrics,
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/auron/Shims.scala b/spark-extension/src/main/scala/org/apache/spark/sql/auron/Shims.scala
index d2489726b..adae2fba7 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/auron/Shims.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/auron/Shims.scala
@@ -18,6 +18,8 @@ package org.apache.spark.sql.auron
import java.io.File
+import scala.annotation.nowarn
+
import org.apache.spark.ShuffleDependency
import org.apache.spark.SparkContext
import org.apache.spark.TaskContext
@@ -259,6 +261,7 @@ abstract class Shims {
def getMinPartitionNum(sparkSession: SparkSession): Int
+ @nowarn("cat=unused") // Some params temporarily unused
def postTransform(plan: SparkPlan, sc: SparkContext): Unit = {}
def getAdaptiveInputPlan(exec: AdaptiveSparkPlanExec): SparkPlan
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronArrowColumnVector.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronArrowColumnVector.scala
index ca5c12cbd..38988aad8 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronArrowColumnVector.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronArrowColumnVector.scala
@@ -16,6 +16,8 @@
*/
package org.apache.spark.sql.execution.auron.columnar
+import scala.annotation.nowarn
+
import org.apache.arrow.vector.BigIntVector
import org.apache.arrow.vector.BitVector
import org.apache.arrow.vector.DateDayVector
@@ -143,6 +145,7 @@ class AuronArrowColumnVector(vector: ValueVector)
}
object AuronArrowColumnVector {
+ @nowarn("cat=unused") // Data type get methods unimplemented (placeholder)
abstract private class ArrowVectorAccessor(private val vector: ValueVector) {
def isNullAt(rowId: Int): Boolean =
if (vector.getValueCount > 0 && vector.getValidityBuffer.capacity == 0) false
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkBase.scala
index 260ee249e..2bd50523e 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkBase.scala
@@ -21,6 +21,7 @@ import java.security.PrivilegedExceptionAction
import java.util
import java.util.UUID
+import scala.annotation.nowarn
import scala.collection.JavaConverters._
import org.apache.hadoop.conf.Configuration
@@ -80,7 +81,7 @@ abstract class NativeParquetSinkBase(
hiveQlTable.getMetadata)
val tableSchema = table.schema
val hadoopConf = newHadoopConf(tableDesc)
- val job = new Job(hadoopConf)
+ val job = Job.getInstance(hadoopConf)
val parquetFileFormat = new ParquetFileFormat()
parquetFileFormat.prepareWrite(sparkSession, job, Map(), tableSchema)
@@ -114,7 +115,7 @@ abstract class NativeParquetSinkBase(
})
// init parquet schema
- val job = new Job(new JobConf(serializableConf.value))
+ val job = Job.getInstance(new JobConf(serializableConf.value))
val tableProperties = tableDesc.getProperties
val columnNameProperty: String = tableProperties.getProperty(IOConstants.COLUMNS)
val columnTypeProperty: String = tableProperties.getProperty(IOConstants.COLUMNS_TYPES)
@@ -157,6 +158,7 @@ abstract class NativeParquetSinkBase(
friendlyName = "NativeRDD.ParquetSink")
}
+ @nowarn("cat=unused") // _tableDesc temporarily unused
protected def newHadoopConf(_tableDesc: TableDesc): Configuration =
sparkSession.sessionState.newHadoopConf()
}
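
`new Job(conf)` has been deprecated since Hadoop 2 in favor of the static factory, so with `-deprecation -Xfatal-warnings` the old constructor would now break the build. A sketch (assumes a hadoop-mapreduce-client dependency on the classpath):

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.Job

object JobFactoryDemo {
  // Job.getInstance is the supported, non-deprecated way to create a
  // Job; it also copies the Configuration rather than sharing it.
  def newJob(conf: Configuration): Job = Job.getInstance(conf)
}
```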
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeBase.scala
index 552dcff37..8b6aa04dc 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeBase.scala
@@ -18,6 +18,7 @@ package org.apache.spark.sql.execution.auron.plan
import java.util.UUID
+import scala.annotation.nowarn
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
@@ -308,6 +309,7 @@ abstract class NativeShuffleExchangeBase(
dependency
}
+ @nowarn("cat=unused") // Some params temporarily unused
private def rangePartitioningBound[K: Ordering: ClassTag, V](
partitions: Int,
rdd: RDD[_ <: Product2[K, V]],
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffledHashJoinBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffledHashJoinBase.scala
index 1f8a06c82..17dfbe258 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffledHashJoinBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffledHashJoinBase.scala
@@ -82,6 +82,8 @@ abstract class NativeShuffledHashJoinBase(
private def nativeBuildSide = buildSide match {
case JoinBuildLeft => pb.JoinSide.LEFT_SIDE
case JoinBuildRight => pb.JoinSide.RIGHT_SIDE
+ case other =>
+ throw new IllegalArgumentException(s"Unknown Join buildSide: $other")
}
protected def rewriteKeyExprToLong(exprs: Seq[Expression]): Seq[Expression]
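
The added catch-all turns a potential `MatchError` into a descriptive failure and, where the scrutinee is a type the compiler can check, keeps the non-exhaustive-match warning out of the now-fatal build. A sketch (the `BuildSide` hierarchy is invented and deliberately unsealed):

```scala
object BuildSideDemo {
  trait BuildSide // unsealed: the compiler cannot prove exhaustiveness
  case object BuildLeft extends BuildSide
  case object BuildRight extends BuildSide

  def toNative(side: BuildSide): String = side match {
    case BuildLeft  => "LEFT_SIDE"
    case BuildRight => "RIGHT_SIDE"
    // Failing fast with the offending value beats an opaque MatchError.
    case other => throw new IllegalArgumentException(s"Unknown join buildSide: $other")
  }
}
```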
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionBase.scala
index a62ea6f13..bf3a15499 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionBase.scala
@@ -70,7 +70,7 @@ abstract class NativeUnionBase(
val unionInputs = ArrayBuffer[(PhysicalPlanNode, Int)]()
partition match {
case p: UnionPartition[_] =>
- val rdds = unionRDD.asInstanceOf[UnionRDD[_]].rdds
+ val rdds = unionRDD.asInstanceOf[UnionRDD[Any]].rdds
val nativeRDD = rdds(p.parentRddIndex).asInstanceOf[NativeRDD]
val input = nativeRDD.nativePlan(p.parentPartition, taskContext)
for (childIndex <- rdds.indices) {
@@ -81,7 +81,7 @@ abstract class NativeUnionBase(
}
}
case p: PartitionerAwareUnionRDDPartition =>
- val rdds = unionRDD.asInstanceOf[PartitionerAwareUnionRDD[_]].rdds
+ val rdds = unionRDD.asInstanceOf[PartitionerAwareUnionRDD[Any]].rdds
for ((rdd, partition) <- rdds.zip(p.parents)) {
val nativeRDD = rdd.asInstanceOf[NativeRDD]
unionInputs.append((nativeRDD.nativePlan(partition, taskContext), partition.index))
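
The `[_]` to `[Any]` change is subtle: after a cast to `UnionRDD[_]`, the `rdds` member likely gets an inferred existential type (`Seq[RDD[_$1]] forSome {...}`) that wildcards cannot express, which `-feature` reports unless `scala.language.existentials` is imported. Casting to `[Any]` erases to the same runtime check without the inferred existential. A sketch of the same effect (`Wrapper` is invented):

```scala
object ExistentialDemo {
  class Wrapper[T](val parts: Seq[Seq[T]])

  def partCount(raw: AnyRef): Int =
    // asInstanceOf[Wrapper[_]] would give 'parts' the inferred type
    // Seq[Seq[_$1]] forSome { type _$1 } and trip the -feature
    // existentials warning; Wrapper[Any] is the same erased cast.
    raw.asInstanceOf[Wrapper[Any]].parts.size
}
```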
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReaderBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReaderBase.scala
index b64ad0459..0af6e0192 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReaderBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReaderBase.scala
@@ -180,5 +180,5 @@ trait BlockObject extends AutoCloseable {
def getFileLength: Long = throw new UnsupportedOperationException
def getByteBuffer: ByteBuffer = throw new UnsupportedOperationException
def getChannel: ReadableByteChannel = throw new UnsupportedOperationException
- def throwFetchFailed(errmsg: String): Unit = throw new UnsupportedOperationException
+ def throwFetchFailed(errmsg: String): Unit = throw new UnsupportedOperationException(errmsg)
}
diff --git a/spark-version-annotation-macros/src/main/scala/org/apache/auron/sparkver.scala b/spark-version-annotation-macros/src/main/scala/org/apache/auron/sparkver.scala
index 7d1e494d0..fd7d4ed91 100644
--- a/spark-version-annotation-macros/src/main/scala/org/apache/auron/sparkver.scala
+++ b/spark-version-annotation-macros/src/main/scala/org/apache/auron/sparkver.scala
@@ -16,8 +16,7 @@
*/
package org.apache.auron
-import scala.annotation.StaticAnnotation
-import scala.annotation.compileTimeOnly
+import scala.annotation.{compileTimeOnly, nowarn, StaticAnnotation}
import scala.language.experimental._
import scala.reflect.macros.whitebox
@@ -96,16 +95,19 @@ object sparkver {
}
}
+@nowarn("cat=unused") // 'vers' is used by macro
@compileTimeOnly("enable macro paradise to expand macro annotations")
final class sparkver(vers: String) extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro sparkver.Macros.verEnable
}
+@nowarn("cat=unused") // 'vers' is used by macro
@compileTimeOnly("enable macro paradise to expand macro annotations")
final class sparkverEnableMembers(vers: String) extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro sparkver.Macros.verEnableMembers
}
+@nowarn("cat=unused") // 'vers' is used by macro
@compileTimeOnly("enable macro paradise to expand macro annotations")
final class sparkverEnableOverride(vers: String) extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro sparkver.Macros.verEnableOverride
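
These annotation classes need the suppression because `vers` is read back out of the annotation tree by the macro implementation, never by ordinary code, so the unused checker cannot see the use. A minimal shape of such a definition (macro body omitted; `onlyFor` is illustrative):

```scala
import scala.annotation.{compileTimeOnly, nowarn, StaticAnnotation}

// 'vers' is consumed at compile time by the expanding macro, which the
// unused-parameter checker cannot observe.
@nowarn("cat=unused")
@compileTimeOnly("enable macro paradise to expand macro annotations")
final class onlyFor(vers: String) extends StaticAnnotation
```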
diff --git a/thirdparty/auron-uniffle/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/uniffle/AuronUniffleShuffleReader.scala b/thirdparty/auron-uniffle/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/uniffle/AuronUniffleShuffleReader.scala
index 9a786b902..7dfe9e46f 100644
--- a/thirdparty/auron-uniffle/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/uniffle/AuronUniffleShuffleReader.scala
+++ b/thirdparty/auron-uniffle/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/uniffle/AuronUniffleShuffleReader.scala
@@ -20,6 +20,7 @@ import java.io.InputStream
import java.nio.ByteBuffer
import java.util
+import scala.annotation.nowarn
import scala.collection.AbstractIterator
import org.apache.commons.lang3.reflect.FieldUtils
@@ -40,6 +41,7 @@ import org.apache.uniffle.common.config.RssConf
import org.apache.uniffle.common.exception.RssException
import org.apache.uniffle.shaded.org.roaringbitmap.longlong.Roaring64NavigableMap
+@nowarn("cat=unused") // Some params temporarily unused
class AuronUniffleShuffleReader[K, C](
reader: RssShuffleReader[K, C],
handle: RssShuffleHandleWrapper[K, _, C],
@@ -207,6 +209,7 @@ class AuronUniffleShuffleReader[K, C](
}
}
+ @nowarn("cat=unused") // Some params temporarily unused
private class UniffleInputStream(
iterator: MultiPartitionIterator[_, _],
shuffleId: Int,