Packages

object GpuOverrides extends Logging with Serializable

Linear Supertypes
Serializable, Serializable, Logging, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. GpuOverrides
  2. Serializable
  3. Serializable
  4. Logging
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. val CASE_MODIFICATION_INCOMPAT: String
  5. val FLOAT_DIFFERS_GROUP_INCOMPAT: String
  6. val UTC_TIMEZONE_ID: ZoneId
  7. def addListener(listener: GpuOverridesListener): Unit
  8. def areAllSupportedTypes(types: DataType*): Boolean
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. def checkAndTagAnsiAgg(checkType: Option[DataType], meta: AggExprMeta[_]): Unit

    Helper function specific to ANSI mode for the aggregate functions that should fall back, since we don't have the same overflow checks that Spark provides on the CPU

    Helper function specific to ANSI mode for the aggregate functions that should fall back, since we don't have the same overflow checks that Spark provides on the CPU

    checkType

    Something other than None triggers logic to detect whether the agg should fall back in ANSI mode. Otherwise (None), it's an automatic fallback.

    meta

    agg expression meta

  11. def checkAndTagFloatAgg(dataType: DataType, conf: RapidsConf, meta: RapidsMeta[_, _, _]): Unit
  12. def checkAndTagFloatNanAgg(op: String, dataType: DataType, conf: RapidsConf, meta: RapidsMeta[_, _, _]): Unit
  13. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native() @HotSpotIntrinsicCandidate()
  14. val commonExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
  15. val commonExpressions: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
  16. val commonScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
  17. def dataWriteCmd[INPUT <: DataWritingCommand](desc: String, doWrap: (INPUT, RapidsConf, Option[RapidsMeta[_, _, _]], DataFromReplacementRule) ⇒ DataWritingCommandMeta[INPUT])(implicit tag: ClassTag[INPUT]): DataWritingCommandRule[INPUT]
  18. val dataWriteCmds: Map[Class[_ <: DataWritingCommand], DataWritingCommandRule[_ <: DataWritingCommand]]
  19. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  20. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  21. def exec[INPUT <: SparkPlan](desc: String, pluginChecks: ExecChecks, doWrap: (INPUT, RapidsConf, Option[RapidsMeta[_, _, _]], DataFromReplacementRule) ⇒ SparkPlanMeta[INPUT])(implicit tag: ClassTag[INPUT]): ExecRule[INPUT]
  22. lazy val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]]
  23. def explainPotentialGpuPlan(df: DataFrame, explain: String): String

    Only run the explain and don't actually convert or run on GPU.

    Only run the explain and don't actually convert or run on GPU. This gets the plan from the dataframe so it's after catalyst has run through all the rules to modify the plan. This means we have to try to undo some of the last rules to make it close to when the columnar rules would normally run on the plan.

  24. def expr[INPUT <: Expression](desc: String, pluginChecks: ExprChecks, doWrap: (INPUT, RapidsConf, Option[RapidsMeta[_, _, _]], DataFromReplacementRule) ⇒ BaseExprMeta[INPUT])(implicit tag: ClassTag[INPUT]): ExprRule[INPUT]
  25. val expressions: Map[Class[_ <: Expression], ExprRule[_ <: Expression]]
  26. def extractLit(exp: Expression): Option[Literal]
    Annotations
    @tailrec()
  27. def extractStringLit(exp: Expression): Option[String]
  28. lazy val fileFormats: Map[FileFormatType, Map[FileFormatOp, FileFormatChecks]]
  29. def fixupReusedExchangeExecs(plan: SparkPlan): SparkPlan

    Searches the plan for ReusedExchangeExec instances containing a GPU shuffle where the output types between the two plan nodes do not match.

    Searches the plan for ReusedExchangeExec instances containing a GPU shuffle where the output types between the two plan nodes do not match. In such a case the ReusedExchangeExec will be updated to match the GPU shuffle output types.

  30. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  31. def getTimeParserPolicy: TimeParserPolicy
  32. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  33. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  34. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  35. def isAnyStringLit(expressions: Seq[Expression]): Boolean

    Checks to see if any of the expressions is a String Literal

  36. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  37. def isLit(exp: Expression): Boolean
  38. def isNullLit(lit: Literal): Boolean
  39. def isOfType(l: Option[Literal], t: DataType): Boolean
  40. def isOrContainsFloatingPoint(dataType: DataType): Boolean
  41. def isStringLit(exp: Expression): Boolean
  42. def isSupportedStringReplacePattern(exp: Expression): Boolean
  43. def isSupportedType(dataType: DataType, allowNull: Boolean = false, allowDecimal: Boolean = false, allowBinary: Boolean = false, allowCalendarInterval: Boolean = false, allowArray: Boolean = false, allowStruct: Boolean = false, allowStringMaps: Boolean = false, allowMaps: Boolean = false, allowNesting: Boolean = false): Boolean

    Is this particular type supported or not.

    Is this particular type supported or not.

    dataType

    the type to check

    allowNull

    should NullType be allowed

    allowDecimal

    should DecimalType be allowed

    allowBinary

    should BinaryType be allowed

    allowCalendarInterval

    should CalendarIntervalType be allowed

    allowArray

    should ArrayType be allowed

    allowStruct

    should StructType be allowed

    allowStringMaps

    should a Map[String, String] specifically be allowed

    allowMaps

    should MapType be allowed generically

    allowNesting

    should nested types like array, struct, and map allow nested types within them, or only primitive types.

    returns

    true if it is allowed else false

  44. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  45. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  46. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  47. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  48. def logDuration[T](shouldLog: Boolean, msg: (Double) ⇒ String)(block: ⇒ T): T

    Provides a way to log an info message about how long an operation took in milliseconds.

  49. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  50. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  51. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  52. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  53. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  54. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  55. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  56. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  57. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  58. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  59. def neverReplaceExec[INPUT <: SparkPlan](desc: String)(implicit tag: ClassTag[INPUT]): ExecRule[INPUT]

    Create an exec rule that should never be replaced, because it is something that should always run on the CPU, or should just be ignored totally for whatever reason.

  60. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  61. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  62. def part[INPUT <: Partitioning](desc: String, checks: PartChecks, doWrap: (INPUT, RapidsConf, Option[RapidsMeta[_, _, _]], DataFromReplacementRule) ⇒ PartMeta[INPUT])(implicit tag: ClassTag[INPUT]): PartRule[INPUT]
  63. val parts: Map[Class[_ <: Partitioning], PartRule[_ <: Partitioning]]
  64. val pluginSupportedOrderableSig: TypeSig
  65. val postColToRowProjection: TreeNodeTag[Seq[NamedExpression]]
  66. val preRowToColProjection: TreeNodeTag[Seq[NamedExpression]]
  67. def removeAllListeners(): Unit
  68. def removeExtraneousShuffles(plan: SparkPlan, conf: RapidsConf): SparkPlan

    Removes unnecessary CPU shuffles that Spark can add to the plan when it does not realize a GPU partitioning satisfies a CPU distribution because CPU and GPU expressions are not semantically equal.

  69. def removeListener(listener: GpuOverridesListener): Unit
  70. def scan[INPUT <: Scan](desc: String, doWrap: (INPUT, RapidsConf, Option[RapidsMeta[_, _, _]], DataFromReplacementRule) ⇒ ScanMeta[INPUT])(implicit tag: ClassTag[INPUT]): ScanRule[INPUT]
  71. val scans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]]
  72. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  73. def toString(): String
    Definition Classes
    AnyRef → Any
  74. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  75. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  76. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  77. def wrapAndTagPlan(plan: SparkPlan, conf: RapidsConf): SparkPlanMeta[SparkPlan]
  78. def wrapDataWriteCmds[INPUT <: DataWritingCommand](writeCmd: INPUT, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]]): DataWritingCommandMeta[INPUT]
  79. def wrapExpr[INPUT <: Expression](expr: INPUT, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]]): BaseExprMeta[INPUT]
  80. def wrapPart[INPUT <: Partitioning](part: INPUT, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]]): PartMeta[INPUT]
  81. def wrapPlan[INPUT <: SparkPlan](plan: INPUT, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]]): SparkPlanMeta[INPUT]
  82. def wrapScan[INPUT <: Scan](scan: INPUT, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]]): ScanMeta[INPUT]

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] ) @Deprecated
    Deprecated

Inherited from Serializable

Inherited from Serializable

Inherited from Logging

Inherited from AnyRef

Inherited from Any

Ungrouped