trait Spark320PlusNonDBShims extends SparkShims
Shim methods that can be compiled with every supported Spark version 3.2.0 and later, except Databricks versions
- Alphabetic
- By Inheritance
- Spark320PlusNonDBShims
- SparkShims
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Abstract Value Members
-
abstract
def
ansiCastRule: ExprRule[_ <: Expression]
Return the replacement rule for AnsiCast.
Return the replacement rule for AnsiCast. 'AnsiCast' is removed from Spark 3.4.0, so need to handle it separately.
- Definition Classes
- SparkShims
-
abstract
def
aqeShuffleReaderExec: ExecRule[_ <: SparkPlan]
- Definition Classes
- SparkShims
-
abstract
def
attachTreeIfSupported[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: ⇒ A): A
dropped by SPARK-34234
dropped by SPARK-34234
- Definition Classes
- SparkShims
-
abstract
def
avroRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
avroRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
columnarAdaptivePlan(a: AdaptiveSparkPlanExec, goal: CoalesceSizeGoal): SparkPlan
- Definition Classes
- SparkShims
-
abstract
def
findOperators(plan: SparkPlan, predicate: (SparkPlan) ⇒ Boolean): Seq[SparkPlan]
Walk the plan recursively and return a list of operators that match the predicate
Walk the plan recursively and return a list of operators that match the predicate
- Definition Classes
- SparkShims
-
abstract
def
getAdaptiveInputPlan(adaptivePlan: AdaptiveSparkPlanExec): SparkPlan
- Definition Classes
- SparkShims
-
abstract
def
getDateFormatter(): DateFormatter
- Definition Classes
- SparkShims
-
abstract
def
getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
- Definition Classes
- SparkShims
-
abstract
def
getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
- Definition Classes
- SparkShims
-
abstract
def
getFileScanRDD(sparkSession: SparkSession, readFunction: (PartitionedFile) ⇒ Iterator[InternalRow], filePartitions: Seq[FilePartition], readDataSchema: StructType, metadataColumns: Seq[AttributeReference] = Seq.empty): RDD[InternalRow]
- Definition Classes
- SparkShims
-
abstract
def
getParquetFilters(schema: MessageType, pushDownDate: Boolean, pushDownTimestamp: Boolean, pushDownDecimal: Boolean, pushDownStartWith: Boolean, pushDownInFilterThreshold: Int, caseSensitive: Boolean, lookupFileMeta: (String) ⇒ String, dateTimeRebaseModeFromConf: String): ParquetFilters
- Definition Classes
- SparkShims
-
abstract
def
getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
- Definition Classes
- SparkShims
-
abstract
def
getSparkShimVersion: ShimVersion
- Definition Classes
- SparkShims
-
abstract
def
hasAliasQuoteFix: Boolean
- Definition Classes
- SparkShims
-
abstract
def
hasCastFloatTimestampUpcast: Boolean
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseRead(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseWrite(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
int96ParquetRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
isAqePlan(p: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isCustomReaderExec(x: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isEmptyRelation(relation: Any): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isExchangeOp(plan: SparkPlanMeta[_]): Boolean
- Definition Classes
- SparkShims
-
abstract
def
isWindowFunctionExec(plan: SparkPlan): Boolean
- Definition Classes
- SparkShims
-
abstract
def
leafNodeDefaultParallelism(ss: SparkSession): Int
- Definition Classes
- SparkShims
-
abstract
def
neverReplaceShowCurrentNamespaceCommand: ExecRule[_ <: SparkPlan]
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseRead(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseReadKey: String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseWrite(conf: SQLConf): String
- Definition Classes
- SparkShims
-
abstract
def
parquetRebaseWriteKey: String
- Definition Classes
- SparkShims
-
abstract
def
sessionFromPlan(plan: SparkPlan): SparkSession
- Definition Classes
- SparkShims
-
abstract
def
shouldFailDivOverflow: Boolean
- Definition Classes
- SparkShims
-
abstract
def
skipAssertIsOnTheGpu(plan: SparkPlan): Boolean
Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.
Our tests, by default, will check that all operators are running on the GPU, but there are some operators that we do not translate to GPU plans, so we need a way to bypass the check for those.
- Definition Classes
- SparkShims
-
abstract
def
supportsColumnarAdaptivePlans: Boolean
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec.
Determine if the Spark version allows the supportsColumnar flag to be overridden in AdaptiveSparkPlanExec. This feature was introduced in Spark 3.2 as part of SPARK-35881.
- Definition Classes
- SparkShims
-
abstract
def
tryTransformIfEmptyRelation(mode: BroadcastMode): Option[Any]
This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins.
This call can produce an EmptyHashedRelation or an empty array, allowing the AQE rule EliminateJoinToEmptyRelation in Spark 3.1.x to optimize certain joins.
In Spark 3.2.0, the optimization is still performed (under AQEPropagateEmptyRelation), but the AQE optimizer is looking at the metrics for the query stage to determine if numRows == 0, and if so it can eliminate certain joins.
The call is implemented only for Spark 3.1.x+. It is disabled in Databricks because it requires a task context to perform the BroadcastMode.transform call, but we'd like to call this from the driver.
- Definition Classes
- SparkShims
-
abstract
def
v1RepairTableCommand(tableName: TableIdentifier): RunnableCommand
- Definition Classes
- SparkShims
Concrete Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
final
def
broadcastModeTransform(mode: BroadcastMode, rows: Array[InternalRow]): Any
- Definition Classes
- Spark320PlusNonDBShims → SparkShims
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
filesFromFileIndex(fileIndex: PartitioningAwareFileIndex): Seq[FileStatus]
- Definition Classes
- Spark320PlusNonDBShims → SparkShims
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression]
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
isCastingStringToNegDecimalScaleSupported: Boolean
- Definition Classes
- SparkShims
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
newBroadcastQueryStageExec(old: BroadcastQueryStageExec, newPlan: SparkPlan): BroadcastQueryStageExec
- Definition Classes
- Spark320PlusNonDBShims → SparkShims
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
reusedExchangeExecPfn: PartialFunction[SparkPlan, ReusedExchangeExec]
Case class ShuffleQueryStageExec holds an additional field shuffleOrigin affecting the unapply method signature
Case class ShuffleQueryStageExec holds an additional field shuffleOrigin affecting the unapply method signature
- Definition Classes
- Spark320PlusNonDBShims → SparkShims
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )