trait SparkShims extends AnyRef
- Alphabetic
- By Inheritance
- SparkShims
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Abstract Value Members
-
abstract
def
ansiCastRule: ExprRule[_ <: Expression]
Return the replacement rule for AnsiCast.
Return the replacement rule for AnsiCast. `AnsiCast` was removed in Spark 3.4.0, so it needs to be handled separately.
- abstract def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan]
-
abstract
def
attachTreeIfSupported[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: ⇒ A): A
dropped by SPARK-34234
- abstract def avroRebaseReadKey: String
- abstract def avroRebaseWriteKey: String
- abstract def broadcastModeTransform(mode: BroadcastMode, toArray: Array[InternalRow]): Any
- abstract def columnarAdaptivePlan(a: AdaptiveSparkPlanExec, goal: CoalesceSizeGoal): SparkPlan
- abstract def filesFromFileIndex(fileCatalog: PartitioningAwareFileIndex): Seq[FileStatus]
-
abstract
def
findOperators(plan: SparkPlan, predicate: (SparkPlan) ⇒ Boolean): Seq[SparkPlan]
Walk the plan recursively and return a list of operators that match the predicate
- abstract def getAdaptiveInputPlan(adaptivePlan: AdaptiveSparkPlanExec): SparkPlan
- abstract def getDateFormatter(): DateFormatter
- abstract def getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
- abstract def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
- abstract def getFileScanRDD(sparkSession: SparkSession, readFunction: (PartitionedFile) ⇒ Iterator[InternalRow], filePartitions: Seq[FilePartition], readDataSchema: StructType, metadataColumns: Seq[AttributeReference] = Seq.empty): RDD[InternalRow]
- abstract def getParquetFilters(schema: MessageType, pushDownDate: Boolean, pushDownTimestamp: Boolean, pushDownDecimal: Boolean, pushDownStartWith: Boolean, pushDownInFilterThreshold: Int, caseSensitive: Boolean, lookupFileMeta: (String) ⇒ String, dateTimeRebaseModeFromConf: String): ParquetFilters
- abstract def getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
- abstract def getSparkShimVersion: ShimVersion
- abstract def hasAliasQuoteFix: Boolean
- abstract def hasCastFloatTimestampUpcast: Boolean
- abstract def int96ParquetRebaseRead(conf: SQLConf): String
- abstract def int96ParquetRebaseReadKey: String
- abstract def int96ParquetRebaseWrite(conf: SQLConf): String
- abstract def int96ParquetRebaseWriteKey: String
- abstract def isAqePlan(p: SparkPlan): Boolean
- abstract def isCustomReaderExec(x: SparkPlan): Boolean
- abstract def isEmptyRelation(relation: Any): Boolean
- abstract def isExchangeOp(plan: SparkPlanMeta[_]): Boolean
- abstract def isWindowFunctionExec(plan: SparkPlan): Boolean
- abstract def leafNodeDefaultParallelism(ss: SparkSession): Int
- abstract def neverReplaceShowCurrentNamespaceCommand: ExecRule[_ <: SparkPlan]
- abstract def newBroadcastQueryStageExec(old: BroadcastQueryStageExec, newPlan: SparkPlan): BroadcastQueryStageExec
- abstract def parquetRebaseRead(conf: SQLConf): String
- abstract def parquetRebaseReadKey: String
- abstract def parquetRebaseWrite(conf: SQLConf): String
- abstract def parquetRebaseWriteKey: String
- abstract def reusedExchangeExecPfn: PartialFunction[SparkPlan, ReusedExchangeExec]
- abstract def sessionFromPlan(plan: SparkPlan): SparkSession
- abstract def shouldFailDivOverflow: Boolean
-
abstract
def
skipAssertIsOnTheGpu(plan: SparkPlan): Boolean
Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.
-
abstract
def
supportsColumnarAdaptivePlans: Boolean
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec.
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec. This feature was introduced in Spark 3.2 as part of SPARK-35881.
-
abstract
def
tryTransformIfEmptyRelation(mode: BroadcastMode): Option[Any]
This call can produce an `EmptyHashedRelation` or an empty array, allowing the AQE rule `EliminateJoinToEmptyRelation` in Spark 3.1.x to optimize certain joins.
This call can produce an `EmptyHashedRelation` or an empty array, allowing the AQE rule `EliminateJoinToEmptyRelation` in Spark 3.1.x to optimize certain joins. In Spark 3.2.0, the optimization is still performed (under `AQEPropagateEmptyRelation`), but the AQE optimizer looks at the metrics for the query stage to determine if numRows == 0, and if so it can eliminate certain joins.
The call is implemented only for Spark 3.1.x+. It is disabled in Databricks because it requires a task context to perform the `BroadcastMode.transform` call, but we'd like to call this from the driver.
- abstract def v1RepairTableCommand(tableName: TableIdentifier): RunnableCommand
Concrete Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def isCastingStringToNegDecimalScaleSupported: Boolean
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )