Packages

com

com.mapr.db.spark.sql.v2

MapRDBTable

case class MapRDBTable(schema: StructType, tablePath: String, hintedIndexes: Array[String], readersPerTablet: Int) extends org.apache.spark.sql.connector.catalog.Table with SupportsRead with Product with Serializable

Linear Supertypes
Serializable, Serializable, Product, Equals, SupportsRead, org.apache.spark.sql.connector.catalog.Table, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. MapRDBTable
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. SupportsRead
  7. Table
  8. AnyRef
  9. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new MapRDBTable(schema: StructType, tablePath: String, hintedIndexes: Array[String], readersPerTablet: Int)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def capabilities(): Set[TableCapability]

    Returns the set of capabilities for this table.

    Returns the set of capabilities for this table.

    Definition Classes
    MapRDBTable → Table
  6. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native() @HotSpotIntrinsicCandidate()
  7. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  8. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  9. val hintedIndexes: Array[String]
  10. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  11. def name(): String

    A name to identify this table.

    A name to identify this table. Implementations should provide a meaningful name, like the database and table name from catalog, or the location of files for this table.

    Definition Classes
    MapRDBTable → Table
  12. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  13. def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder

    Returns a ScanBuilder which can be used to build a Scan.

    Returns a ScanBuilder which can be used to build a Scan. Spark will call this method to configure each data source scan.

    options

    The options for reading, which is an immutable case-insensitive string-to-string map.

    Definition Classes
    MapRDBTable → SupportsRead
  14. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  15. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  16. def partitioning(): Array[Transform]
    Definition Classes
    Table
  17. def properties(): Map[String, String]
    Definition Classes
    Table
  18. val readersPerTablet: Int
  19. val schema: StructType
    Definition Classes
    MapRDBTable → Table
  20. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  21. val tablePath: String
  22. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  23. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  24. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] ) @Deprecated
    Deprecated

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from SupportsRead

Inherited from org.apache.spark.sql.connector.catalog.Table

Inherited from AnyRef

Inherited from Any

Ungrouped