case class SparkContextFunctions(sc: SparkContext, bufferWrites: Boolean = true, hintUsingIndex: Option[String] = None, queryOptions: Map[String, String] = Map[String, String]()) extends Serializable with Product
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- SparkContextFunctions
- Product
- Equals
- Serializable
- Serializable
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Instance Constructors
- new SparkContextFunctions(sc: SparkContext, bufferWrites: Boolean = true, hintUsingIndex: Option[String] = None, queryOptions: Map[String, String] = Map[String, String]())
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- val bufferWrites: Boolean
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- val hintUsingIndex: Option[String]
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
loadFromMapRDB[T](tableName: String)(implicit arg0: ClassTag[T], e: DefaultType[T, OJAIDocument], f: RDDTYPE[T]): MapRDBTableScanRDD[T]
Spark MapRDB connector-specific function to load JSON tables as RDD[OJAIDocument]
- tableName
name of the table in MapRDB
val docs = sc.loadFromMapRDB("tablePath")
Example:
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- val queryOptions: Map[String, String]
- val sc: SparkContext
- def setBufferWrites(bufferWrites: Boolean): SparkContextFunctions
- def setHintUsingIndex(indexPath: String): SparkContextFunctions
- def setQueryOption(queryOptionKey: String, queryOptionValue: String): SparkContextFunctions
- def setQueryOptions(queryOptions: Map[String, String]): SparkContextFunctions
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated
- Deprecated