object TrampolineUtil
- Alphabetic
- By Inheritance
- TrampolineUtil
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
addShutdownHook(priority: Int, runnable: Runnable): AnyRef
Add shutdown hook with priority
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- def asNullable(dt: DataType): DataType
-
def
bytesToString(size: Long): String
Get a human-readable string, e.g.: "4.0 MiB", for a value in bytes.
-
def
cleanupAnyExistingSession(): Unit
Shuts down and cleans up any existing Spark session
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
def
dataTypeExistsRecursively(dt: DataType, f: (DataType) ⇒ Boolean): Boolean
Return true if the provided predicate function returns true for any type node within the datatype tree.
- def doExecuteBroadcast[T](child: SparkPlan): Broadcast[T]
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- def fromAttributes(attrs: Seq[Attribute]): StructType
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
getFSBytesReadOnThreadCallback(): () ⇒ Long
Returns a function that can be called to find Hadoop FileSystem bytes read.
Returns a function that can be called to find Hadoop FileSystem bytes read. If getFSBytesReadOnThreadCallback is called from thread r at time t, the returned callback will return the bytes read on r since t.
-
def
getSimpleName(cls: Class[_]): String
Get the simple name of a class with fixup for any Scala internal errors
- def getTaskMemoryManager(): TaskMemoryManager
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
incBytesRead(inputMetrics: InputMetrics, bytesRead: Long): Unit
Set the bytes read task input metric
- def incInputRecordsRows(inputMetrics: InputMetrics, rows: Long): Unit
-
def
incTaskMetricsDiskBytesSpilled(amountSpilled: Long): Unit
Increment the task's disk bytes spilled metric.
Increment the task's disk bytes spilled metric. If the current thread does not correspond to a Spark task then this call does nothing.
- amountSpilled
amount of memory spilled in bytes
-
def
incTaskMetricsMemoryBytesSpilled(amountSpilled: Long): Unit
Increment the task's memory bytes spilled metric.
Increment the task's memory bytes spilled metric. If the current thread does not correspond to a Spark task then this call does nothing.
- amountSpilled
amount of memory spilled in bytes
- def isDriver(sparkConf: SparkConf): Boolean
-
def
isDriver(env: SparkEnv): Boolean
Returns true if called from code running on the Spark driver.
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isSupportedRelation(mode: BroadcastMode): Boolean
- def jsonValue(dataType: DataType): JValue
- def makeSparkUpgradeException(version: String, message: String, cause: Throwable): SparkUpgradeException
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
newBlockManagerId(execId: String, host: String, port: Int, topologyInfo: Option[String] = None): BlockManagerId
Create a BlockManagerId instance
-
def
newInputMetrics(): InputMetrics
Return a new InputMetrics instance
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
sameType(a: DataType, b: DataType): Boolean
Check if a and b are the same data type when ignoring nullability (StructField.nullable, ArrayType.containsNull, and MapType.valueContainsNull).
-
def
setTaskContext(tc: TaskContext): Unit
Set the task context for the current thread
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
throwAnalysisException(msg: String): Nothing
Throw a Spark analysis exception
- def toAttributes(structType: StructType): Seq[Attribute]
-
def
toString(): String
- Definition Classes
- AnyRef → Any
- def unionLikeMerge(left: DataType, right: DataType): DataType
-
def
unsetTaskContext(): Unit
Remove the task context for the current thread
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )