trait

com.nvidia.spark.rapids.shims

Spark320PlusShims

trait Spark320PlusShims extends SparkShims with RebaseShims with Logging

Shim base class that can be compiled with every supported Spark version 3.2.0+

Linear Supertypes
Known Subclasses
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. Spark320PlusShims
  2. Logging
  3. RebaseShims
  4. SparkShims
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Abstract Value Members

  1. abstract def ansiCastRule: ExprRule[_ <: Expression]

    Return the replacement rule for AnsiCast.

    Return the replacement rule for AnsiCast. 'AnsiCast' is removed from Spark 3.4.0, so need to handle it separately.

    Definition Classes
    SparkShims
  2. abstract def broadcastModeTransform(mode: BroadcastMode, toArray: Array[InternalRow]): Any
    Definition Classes
    SparkShims
  3. abstract def filesFromFileIndex(fileCatalog: PartitioningAwareFileIndex): Seq[FileStatus]
    Definition Classes
    SparkShims
  4. abstract def getFileScanRDD(sparkSession: SparkSession, readFunction: (PartitionedFile) ⇒ Iterator[InternalRow], filePartitions: Seq[FilePartition], readDataSchema: StructType, metadataColumns: Seq[AttributeReference] = Seq.empty): RDD[InternalRow]
    Definition Classes
    SparkShims
  5. abstract def getParquetFilters(schema: MessageType, pushDownDate: Boolean, pushDownTimestamp: Boolean, pushDownDecimal: Boolean, pushDownStartWith: Boolean, pushDownInFilterThreshold: Int, caseSensitive: Boolean, lookupFileMeta: (String) ⇒ String, dateTimeRebaseModeFromConf: String): ParquetFilters
    Definition Classes
    SparkShims
  6. abstract def getSparkShimVersion: ShimVersion
    Definition Classes
    SparkShims
  7. abstract def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression]
  8. abstract def neverReplaceShowCurrentNamespaceCommand: ExecRule[_ <: SparkPlan]
    Definition Classes
    SparkShims
  9. abstract def newBroadcastQueryStageExec(old: BroadcastQueryStageExec, newPlan: SparkPlan): BroadcastQueryStageExec
    Definition Classes
    SparkShims
  10. abstract def reusedExchangeExecPfn: PartialFunction[SparkPlan, ReusedExchangeExec]
    Definition Classes
    SparkShims

Concrete Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan]
    Definition Classes
    Spark320PlusShims → SparkShims
  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def attachTreeIfSupported[TreeType <: TreeNode[_], A](tree: TreeType, msg: String)(f: ⇒ A): A

    dropped by SPARK-34234

    dropped by SPARK-34234

    Definition Classes
    Spark320PlusShims → SparkShims
  7. final def avroRebaseReadKey: String
    Definition Classes
    RebaseShims
  8. final def avroRebaseWriteKey: String
    Definition Classes
    RebaseShims
  9. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native() @HotSpotIntrinsicCandidate()
  10. def columnarAdaptivePlan(a: AdaptiveSparkPlanExec, goal: CoalesceSizeGoal): SparkPlan
    Definition Classes
    Spark320PlusShims → SparkShims
  11. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  12. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  13. def findOperators(plan: SparkPlan, predicate: (SparkPlan) ⇒ Boolean): Seq[SparkPlan]

    Walk the plan recursively and return a list of operators that match the predicate

    Walk the plan recursively and return a list of operators that match the predicate

    Definition Classes
    Spark320PlusShims → SparkShims
  14. def getAdaptiveInputPlan(adaptivePlan: AdaptiveSparkPlanExec): SparkPlan
    Definition Classes
    Spark320PlusShims → SparkShims
  15. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  16. def getDateFormatter(): DateFormatter
    Definition Classes
    Spark320PlusShims → SparkShims
  17. def getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
    Definition Classes
    Spark320PlusShims → SparkShims
  18. def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
    Definition Classes
    Spark320PlusShims → SparkShims
  19. def getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
    Definition Classes
    Spark320PlusShims → SparkShims
  20. def hasAliasQuoteFix: Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  21. def hasCastFloatTimestampUpcast: Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  22. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  23. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  24. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  25. def int96ParquetRebaseRead(conf: SQLConf): String
    Definition Classes
    RebaseShims
  26. def int96ParquetRebaseReadKey: String
    Definition Classes
    RebaseShims
  27. def int96ParquetRebaseWrite(conf: SQLConf): String
    Definition Classes
    RebaseShims
  28. def int96ParquetRebaseWriteKey: String
    Definition Classes
    RebaseShims
  29. final def isAqePlan(p: SparkPlan): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  30. def isCastingStringToNegDecimalScaleSupported: Boolean
    Definition Classes
    SparkShims
  31. def isCustomReaderExec(x: SparkPlan): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  32. def isEmptyRelation(relation: Any): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  33. final def isExchangeOp(plan: SparkPlanMeta[_]): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  34. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  35. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  36. def isWindowFunctionExec(plan: SparkPlan): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  37. def leafNodeDefaultParallelism(ss: SparkSession): Int
    Definition Classes
    Spark320PlusShims → SparkShims
  38. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  39. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  40. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  41. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  42. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  43. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  44. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  45. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  46. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  47. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  48. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  49. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  50. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  51. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  52. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  53. final def parquetRebaseRead(conf: SQLConf): String
    Definition Classes
    RebaseShims
  54. final def parquetRebaseReadKey: String
    Definition Classes
    RebaseShims
  55. final def parquetRebaseWrite(conf: SQLConf): String
    Definition Classes
    RebaseShims
  56. final def parquetRebaseWriteKey: String
    Definition Classes
    RebaseShims
  57. final def sessionFromPlan(plan: SparkPlan): SparkSession
    Definition Classes
    Spark320PlusShims → SparkShims
  58. def shouldFailDivOverflow(): Boolean
    Definition Classes
    Spark320PlusShims → SparkShims
  59. def skipAssertIsOnTheGpu(plan: SparkPlan): Boolean

    Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.

    Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.

    Definition Classes
    Spark320PlusShims → SparkShims
  60. def supportsColumnarAdaptivePlans: Boolean

    Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec.

    Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec. This feature was introduced in Spark 3.2 as part of SPARK-35881.

    Definition Classes
    Spark320PlusShims → SparkShims
  61. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  62. def tagFileSourceScanExec(meta: SparkPlanMeta[FileSourceScanExec]): Unit
  63. def toString(): String
    Definition Classes
    AnyRef → Any
  64. def tryTransformIfEmptyRelation(mode: BroadcastMode): Option[Any]

    This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins.

    This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins.

    In Spark 3.2.0, the optimization is still performed (under AQEPropagateEmptyRelation), but the AQE optimizer is looking at the metrics for the query stage to determine if numRows == 0, and if so it can eliminate certain joins.

    The call is implemented only for Spark 3.1.x+. It is disabled in Databricks because it requires a task context to perform the BroadcastMode.transform call, but we'd like to call this from the driver.

    Definition Classes
    Spark320PlusShims → SparkShims
  65. def v1RepairTableCommand(tableName: TableIdentifier): RunnableCommand
    Definition Classes
    Spark320PlusShims → SparkShims
  66. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  67. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  68. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] ) @Deprecated
    Deprecated

Inherited from Logging

Inherited from RebaseShims

Inherited from SparkShims

Inherited from AnyRef

Inherited from Any

Ungrouped