Showing content from http://spark.apache.org/docs/latest/api/scala/org/apache/spark/sql/SQLContext.html below:
Spark 4.0.0 ScalaDoc - org.apache.spark.sql.SQLContext
abstract class SQLContext extends Logging with Serializable
Ordering
- Grouped
- Alphabetic
- By Inheritance
Inherited
- SQLContext
- Serializable
- Logging
- AnyRef
- Any
Visibility
- Public
- Protected
Type Members
- implicit class LogStringContext extends AnyRef
Concrete Value Members
- final def !=(arg0: Any): Boolean
- final def ##: Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def baseRelationToDataFrame(baseRelation: BaseRelation): Dataset[Row]
- def cacheTable(tableName: String): Unit
- def clearCache(): Unit
- def clone(): AnyRef
- def createDataFrame(data: List[_], beanClass: Class[_]): Dataset[Row]
- def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): Dataset[Row]
- def createDataFrame(rdd: RDD[_], beanClass: Class[_]): Dataset[Row]
- def createDataFrame(rows: List[Row], schema: StructType): Dataset[Row]
- def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): Dataset[Row]
- def createDataFrame(rowRDD: RDD[Row], schema: StructType): Dataset[Row]
- def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): Dataset[Row]
- def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): Dataset[Row]
- def createDataset[T](data: List[T])(implicit arg0: Encoder[T]): Dataset[T]
- def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]
- def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]
- def dropTempTable(tableName: String): Unit
- def emptyDataFrame: Dataset[Row]
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: AnyRef): Boolean
- def getAllConfs: Map[String, String]
- final def getClass(): Class[_ <: AnyRef]
- def getConf(key: String, defaultValue: String): String
- def getConf(key: String): String
- def hashCode(): Int
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- def isCached(tableName: String): Boolean
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def log: Logger
- def logDebug(msg: => String, throwable: Throwable): Unit
- def logDebug(entry: LogEntry, throwable: Throwable): Unit
- def logDebug(entry: LogEntry): Unit
- def logDebug(msg: => String): Unit
- def logError(msg: => String, throwable: Throwable): Unit
- def logError(entry: LogEntry, throwable: Throwable): Unit
- def logError(entry: LogEntry): Unit
- def logError(msg: => String): Unit
- def logInfo(msg: => String, throwable: Throwable): Unit
- def logInfo(entry: LogEntry, throwable: Throwable): Unit
- def logInfo(entry: LogEntry): Unit
- def logInfo(msg: => String): Unit
- def logName: String
- def logTrace(msg: => String, throwable: Throwable): Unit
- def logTrace(entry: LogEntry, throwable: Throwable): Unit
- def logTrace(entry: LogEntry): Unit
- def logTrace(msg: => String): Unit
- def logWarning(msg: => String, throwable: Throwable): Unit
- def logWarning(entry: LogEntry, throwable: Throwable): Unit
- def logWarning(entry: LogEntry): Unit
- def logWarning(msg: => String): Unit
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def range(start: Long, end: Long, step: Long, numPartitions: Int): Dataset[Row]
- def range(start: Long, end: Long, step: Long): Dataset[Row]
- def range(start: Long, end: Long): Dataset[Row]
- def range(end: Long): Dataset[Row]
- def setConf(key: String, value: String): Unit
- def sparkContext: SparkContext
- val sparkSession: SparkSession
- def sql(sqlText: String): Dataset[Row]
- final def synchronized[T0](arg0: => T0): T0
- def table(tableName: String): Dataset[Row]
- def tableNames(databaseName: String): Array[String]
- def tableNames(): Array[String]
- def tables(databaseName: String): Dataset[Row]
- def tables(): Dataset[Row]
- def toString(): String
- def uncacheTable(tableName: String): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- final def wait(): Unit
- def withLogContext(context: Map[String, String])(body: => Unit): Unit
Deprecated Value Members
- def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): Dataset[Row]
- def applySchema(rdd: RDD[_], beanClass: Class[_]): Dataset[Row]
- def applySchema(rowRDD: JavaRDD[Row], schema: StructType): Dataset[Row]
- def applySchema(rowRDD: RDD[Row], schema: StructType): Dataset[Row]
- def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): Dataset[Row]
- def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): Dataset[Row]
- def createExternalTable(tableName: String, source: String, options: Map[String, String]): Dataset[Row]
- def createExternalTable(tableName: String, source: String, options: Map[String, String]): Dataset[Row]
- def createExternalTable(tableName: String, path: String, source: String): Dataset[Row]
- def createExternalTable(tableName: String, path: String): Dataset[Row]
- def finalize(): Unit
- def jdbc(url: String, table: String, theParts: Array[String]): Dataset[Row]
- def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): Dataset[Row]
- def jdbc(url: String, table: String): Dataset[Row]
- def jsonFile(path: String, samplingRatio: Double): Dataset[Row]
- def jsonFile(path: String, schema: StructType): Dataset[Row]
- def jsonFile(path: String): Dataset[Row]
- def jsonRDD(json: JavaRDD[String], samplingRatio: Double): Dataset[Row]
- def jsonRDD(json: RDD[String], samplingRatio: Double): Dataset[Row]
- def jsonRDD(json: JavaRDD[String], schema: StructType): Dataset[Row]
- def jsonRDD(json: RDD[String], schema: StructType): Dataset[Row]
- def jsonRDD(json: JavaRDD[String]): Dataset[Row]
- def jsonRDD(json: RDD[String]): Dataset[Row]
- def load(source: String, schema: StructType, options: Map[String, String]): Dataset[Row]
- def load(source: String, schema: StructType, options: Map[String, String]): Dataset[Row]
- def load(source: String, options: Map[String, String]): Dataset[Row]
- def load(source: String, options: Map[String, String]): Dataset[Row]
- def load(path: String, source: String): Dataset[Row]
- def load(path: String): Dataset[Row]
- def parquetFile(paths: String*): Dataset[Row]
Inherited from Logging
Inherited from AnyRef
Inherited from Any
Basic Operations
Cached Table Management
Configuration
dataframe
Custom DataFrame Creation
Custom Dataset Creation
Persistent Catalog DDL
Generic Data Sources
Specific Data Sources
Support functions for language integrated queries
RetroSearch is an open source project built by @garambo
| Open a GitHub Issue
Search and Browse the WWW like it's 1997 | Search results from DuckDuckGo
HTML:
3.2
| Encoding:
UTF-8
| Version:
0.7.4