Databricks Scala Spark API - org.apache.spark.sql.SQLContext

class SQLContext extends Logging with Serializable
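SQLContext is the Spark 1.x entry point for working with structured data (DataFrames, SQL queries, cached tables). In Spark 2.x it is kept for backward compatibility and wraps a SparkSession, which is why the members below include a sparkSession value and a newSession() method. A minimal sketch of obtaining one; the master URL and application name are illustrative only:

    import org.apache.spark.sql.{SQLContext, SparkSession}

    // Build (or reuse) a SparkSession; the SQLContext is obtained from it.
    val spark = SparkSession.builder()
      .master("local[*]")              // illustrative master
      .appName("sqlcontext-sketch")    // illustrative app name
      .getOrCreate()

    val sqlContext: SQLContext = spark.sqlContext

    // newSession() gives an isolated SQLContext: separate SQL configuration,
    // temporary tables and registered functions, but the same SparkContext
    // and the same cached data.
    val isolated: SQLContext = sqlContext.newSession()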

 

Inherited
  1. SQLContext
  2. Serializable (scala.Serializable)
  3. Serializable (java.io.Serializable)
  4. Logging
  5. AnyRef
  6. Any
Value Members
  1.  final def !=(arg0: Any): Boolean
  2.  final def ##(): Int
  3.  final def ==(arg0: Any): Boolean
  4.  final def asInstanceOf[T0]: T0
  5.  def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame
  6.  def cacheTable(tableName: String): Unit
  7.  def clearCache(): Unit
  8.  def clone(): AnyRef
  9.  def createDataFrame(data: List[_], beanClass: Class[_]): DataFrame
  10.  def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame
  11.  def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame
  12.  def createDataFrame(rows: List[Row], schema: StructType): DataFrame
  13.  def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame
  14.  def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame
  15.  def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
  16.  def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
  17.  def createDataset[T](data: List[T])(implicit arg0: Encoder[T]): Dataset[T]
  18.  def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]
  19.  def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]
  20.  def dropTempTable(tableName: String): Unit
  21.  def emptyDataFrame: DataFrame
  22.  final def eq(arg0: AnyRef): Boolean
  23.  def equals(arg0: Any): Boolean
  24.  def experimental: ExperimentalMethods
  25.  def finalize(): Unit
  26.  def getAllConfs: Map[String, String]
  27.  final def getClass(): Class[_]
  28.  def getConf(key: String, defaultValue: String): String
  29.  def getConf(key: String): String
  30.  def hashCode(): Int
  31.  def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
  32.  def initializeLogIfNecessary(isInterpreter: Boolean): Unit
  33.  def isCached(tableName: String): Boolean
  34.  final def isInstanceOf[T0]: Boolean
  35.  def isTraceEnabled(): Boolean
  36.  def listenerManager: ExecutionListenerManager
  37.  def log: Logger
  38.  def logDebug(msg: ⇒ String, throwable: Throwable): Unit
  39.  def logDebug(msg: ⇒ String): Unit
  40.  def logError(msg: ⇒ String, throwable: Throwable): Unit
  41.  def logError(msg: ⇒ String): Unit
  42.  def logInfo(msg: ⇒ String, throwable: Throwable): Unit
  43.  def logInfo(msg: ⇒ String): Unit
  44.  def logName: String
  45.  def logTrace(msg: ⇒ String, throwable: Throwable): Unit
  46.  def logTrace(msg: ⇒ String): Unit
  47.  def logWarning(msg: ⇒ String, throwable: Throwable): Unit
  48.  def logWarning(msg: ⇒ String): Unit
  49.  final def ne(arg0: AnyRef): Boolean
  50.  def newSession(): SQLContext
  51.  final def notify(): Unit
  52.  final def notifyAll(): Unit
  53.  def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame
  54.  def range(start: Long, end: Long, step: Long): DataFrame
  55.  def range(start: Long, end: Long): DataFrame
  56.  def range(end: Long): DataFrame
  57.  def read: DataFrameReader
  58.  def readStream: DataStreamReader
  59.  def setConf(key: String, value: String): Unit
  60.  def setConf(props: Properties): Unit
  61.  def sparkContext: SparkContext
  62.  val sparkSession: SparkSession
  63.  def sql(sqlText: String): DataFrame
  64.  def streams: StreamingQueryManager
  65.  final def synchronized[T0](arg0: ⇒ T0): T0
  66.  def table(tableName: String): DataFrame
  67.  def tableNames(databaseName: String): Array[String]
  68.  def tableNames(): Array[String]
  69.  def tables(databaseName: String): DataFrame
  70.  def tables(): DataFrame
  71.  def toString(): String
  72.  def udf: UDFRegistration
  73.  def uncacheTable(tableName: String): Unit
  74.  final def wait(): Unit
  75.  final def wait(arg0: Long, arg1: Int): Unit
  76.  final def wait(arg0: Long): Unit
  77.  object implicits extends SQLImplicits with Serializable
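The sketch below exercises a handful of the members listed above: the reflection-based createDataFrame for a Seq of case classes, sql, table and tableNames. It assumes the sqlContext built in the sketch near the top of this page; the Person case class, its data and the view name are made up for illustration.

    case class Person(name: String, age: Long)   // hypothetical record type

    val people = sqlContext.createDataFrame(Seq(
      Person("Alice", 29),
      Person("Bob", 35)))

    // Register a temporary view so it is visible to sql() and table().
    people.createOrReplaceTempView("people")

    val adults = sqlContext.sql("SELECT name FROM people WHERE age >= 18")
    adults.show()

    val sameTable = sqlContext.table("people")
    println(sqlContext.tableNames().mkString(", "))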
Deprecated Value Members
  1.  def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame
  2.  def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame
  3.  def applySchema(rowRDD: JavaRDD[Row], schema: StructType): DataFrame
  4.  def applySchema(rowRDD: RDD[Row], schema: StructType): DataFrame
  5.  def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame
  6.  def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame
  7.  def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame
  8.  def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame
  9.  def createExternalTable(tableName: String, path: String, source: String): DataFrame
  10.  def createExternalTable(tableName: String, path: String): DataFrame
  11.  def jdbc(url: String, table: String, theParts: Array[String]): DataFrame
  12.  def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame
  13.  def jdbc(url: String, table: String): DataFrame
  14.  def jsonFile(path: String, samplingRatio: Double): DataFrame
  15.  def jsonFile(path: String, schema: StructType): DataFrame
  16.  def jsonFile(path: String): DataFrame
  17.  def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame
  18.  def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame
  19.  def jsonRDD(json: JavaRDD[String], schema: StructType): DataFrame
  20.  def jsonRDD(json: RDD[String], schema: StructType): DataFrame
  21.  def jsonRDD(json: JavaRDD[String]): DataFrame
  22.  def jsonRDD(json: RDD[String]): DataFrame
  23.  def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame
  24.  def load(source: String, schema: StructType, options: Map[String, String]): DataFrame
  25.  def load(source: String, options: java.util.Map[String, String]): DataFrame
  26.  def load(source: String, options: Map[String, String]): DataFrame
  27.  def load(path: String, source: String): DataFrame
  28.  def load(path: String): DataFrame
  29.  def parquetFile(paths: String*): DataFrame
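Every deprecated loader above has a non-deprecated counterpart on the DataFrameReader returned by read, and the deprecated DDL helpers map onto the catalog of the wrapped SparkSession. A rough sketch of the mapping, assuming the sqlContext from the first example and purely illustrative paths and table names:

    // jsonFile / jsonRDD      ->  read.json
    val fromJson    = sqlContext.read.json("/tmp/example/people.json")

    // parquetFile             ->  read.parquet
    val fromParquet = sqlContext.read.parquet("/tmp/example/people.parquet")

    // load(source, options)   ->  read.format(source).options(...).load(),
    //                             shown under "Generic Data Sources" below
    // jdbc(url, table, ...)   ->  read.jdbc(...),
    //                             shown under "Specific Data Sources" below

    // createExternalTable(...)  ->  catalog API on the wrapped SparkSession
    // (catalog.createTable in Spark 2.2+, catalog.createExternalTable before)
    sqlContext.sparkSession.catalog
      .createTable("people_ext", "/tmp/example/people.parquet")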


Basic Operations
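A sketch of the basic operations group: registering a function through udf, querying it with sql, and listing what the catalog knows via tables() and tableNames(). It assumes the sqlContext and the "people" temporary view from the sketches above; the UDF name is made up.

    // Register a simple UDF and use it from SQL.
    sqlContext.udf.register("shout", (s: String) => s.toUpperCase + "!")

    sqlContext.sql("SELECT shout(name) AS loud_name FROM people").show()

    // Catalog listings: a DataFrame of tables, or just the names.
    sqlContext.tables().show()
    println(sqlContext.tableNames().mkString(", "))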

Cached Table Management
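A sketch of cached table management, again assuming the "people" view registered earlier:

    sqlContext.cacheTable("people")          // mark the table/view for in-memory caching
    println(sqlContext.isCached("people"))   // true once marked

    sqlContext.uncacheTable("people")        // drop just this table from the cache
    sqlContext.clearCache()                  // or drop every cached table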

Configuration
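Configuration is plain key/value strings scoped to this context. spark.sql.shuffle.partitions is a real Spark SQL property; the second key is made up only to show the overload that takes a default value:

    // Set and read Spark SQL configuration properties for this context.
    sqlContext.setConf("spark.sql.shuffle.partitions", "8")

    val parts  = sqlContext.getConf("spark.sql.shuffle.partitions")
    val orElse = sqlContext.getConf("spark.sql.some.unset.key", "fallback")

    // All effective key/value pairs.
    sqlContext.getAllConfs.foreach { case (k, v) => println(s"$k=$v") }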

dataframe
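This group keeps its raw name in the generated page; it appears to cover the numeric range helpers and emptyDataFrame. A sketch, assuming the sqlContext from the first example:

    // A single-column DataFrame named "id" holding 0, 2, 4, ..., 98,
    // spread over 4 partitions.
    val ids = sqlContext.range(0L, 100L, 2L, 4)
    ids.show(5)

    // A DataFrame with no rows and no columns, occasionally useful as a seed value.
    val empty = sqlContext.emptyDataFrame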

Custom DataFrame Creation
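Besides the case-class route shown after the member list, a DataFrame can be assembled from an RDD of Rows plus an explicit StructType. A sketch with a made-up two-column schema:

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}

    val schema = StructType(Seq(
      StructField("name", StringType, nullable = false),
      StructField("age",  LongType,   nullable = true)))

    val rows: RDD[Row] = sqlContext.sparkContext.parallelize(Seq(
      Row("Alice", 29L),
      Row("Bob",   35L)))

    val fromRows = sqlContext.createDataFrame(rows, schema)
    fromRows.printSchema()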

Custom Dataset Creation
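createDataset needs an Encoder for its element type; for primitives, tuples and case classes the encoder comes from the implicits object covered at the end of this page. A minimal sketch over Longs:

    // Implicit encoders for primitives and case classes.
    import sqlContext.implicits._

    val ds = sqlContext.createDataset(Seq(1L, 2L, 3L))
    val doubled = ds.map(_ * 2)
    doubled.show()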

Persistent Catalog DDL
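The createExternalTable variants in this group are deprecated (see the mapping after the deprecated members above); dropTempTable is still available for removing a temporary table or view by name. A sketch assuming the "people" view registered earlier:

    // Remove the temporary view registered in the earlier sketch.
    sqlContext.dropTempTable("people")
    println(sqlContext.tableNames().contains("people"))   // false afterwards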

Generic Data Sources
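read returns a DataFrameReader that can target any registered data source, and readStream is its streaming counterpart. A sketch with an illustrative CSV path and a socket source:

    // Batch: pick a format, set options, then load.
    val csv = sqlContext.read
      .format("csv")
      .option("header", "true")
      .option("inferSchema", "true")
      .load("/tmp/example/people.csv")     // illustrative path

    // Streaming: same pattern through readStream (here a socket source).
    val lines = sqlContext.readStream
      .format("socket")
      .option("host", "localhost")
      .option("port", "9999")
      .load()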

Specific Data Sources
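The jdbc, jsonFile, jsonRDD and parquetFile members of this group are all deprecated; the current entry points live on the DataFrameReader. A sketch of a partitioned JDBC read that mirrors the deprecated jdbc(url, table, columnName, lowerBound, upperBound, numPartitions) overload; the URL, table, column and credentials are illustrative:

    import java.util.Properties

    val props = new Properties()
    props.setProperty("user", "example_user")
    props.setProperty("password", "example_password")

    // Reads the table in 8 partitions, splitting on the numeric "id" column.
    val orders = sqlContext.read.jdbc(
      "jdbc:postgresql://localhost:5432/shop",
      "orders",
      "id", 0L, 1000000L, 8,
      props)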

Support functions for language integrated queries
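The implicits object (the last entry under Value Members) supplies the conversions and encoders behind the language-integrated style: toDF and toDS on local collections and RDDs, plus the $"column" syntax. A sketch, assuming the sqlContext from the first example:

    import sqlContext.implicits._

    // Local collections gain toDF / toDS.
    val df = Seq(("Alice", 29), ("Bob", 35)).toDF("name", "age")
    val ds = Seq(1, 2, 3).toDS()

    // $"..." builds a Column, so queries can be written without SQL strings.
    df.filter($"age" > 30).select($"name").show()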

