Databricks Scala Spark API - org.apache.spark

package spark
Type Members
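Usage sketches for a few of these types follow the listing.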
- case class Aggregator[K, V, C](createCombiner: (V) ⇒ C, mergeValue: (C, V) ⇒ C, mergeCombiners: (C, C) ⇒ C) extends Product with Serializable
- class BarrierTaskContext extends TaskContext with Logging
- class BarrierTaskInfo extends AnyRef
- class ComplexFutureAction[T] extends FutureAction[T]
- class ContextAwareIterator[+T] extends Iterator[T]
- abstract class Dependency[T] extends Serializable
- case class ExceptionFailure(className: String, description: String, stackTrace: Array[StackTraceElement], fullStackTrace: String, exceptionWrapper: Option[ThrowableSerializationWrapper], accumUpdates: Seq[AccumulableInfo] = Seq.empty, accums: Seq[AccumulatorV2[_, _]] = Nil, metricPeaks: Seq[Long] = Seq.empty) extends TaskFailedReason with Product with Serializable
- case class ExecutorLostFailure(execId: String, exitCausedByApp: Boolean = true, reason: Option[String]) extends TaskFailedReason with Product with Serializable
- case class FetchFailed(bmAddress: BlockManagerId, shuffleId: Int, mapId: Long, mapIndex: Int, reduceId: Int, message: String) extends TaskFailedReason with Product with Serializable
- trait FutureAction[T] extends Future[T]
- class HashPartitioner extends Partitioner
- class InterruptibleIterator[+T] extends Iterator[T]
- sealed abstract final class JobExecutionStatus extends Enum[JobExecutionStatus]
- trait JobSubmitter extends AnyRef
- abstract class NarrowDependency[T] extends Dependency[T]
- class OneToOneDependency[T] extends NarrowDependency[T]
- trait Partition extends Serializable
- abstract class Partitioner extends Serializable
- class RangeDependency[T] extends NarrowDependency[T]
- class RangePartitioner[K, V] extends Partitioner
- class SerializableWritable[T <: Writable] extends Serializable
- class ShuffleDependency[K, V, C] extends Dependency[Product2[K, V]] with Logging
- class SimpleFutureAction[T] extends FutureAction[T]
- class SparkConf extends Cloneable with Logging with Serializable
- class SparkContext extends Logging
- class SparkEnv extends Logging
- class SparkException extends Exception with SparkThrowable
- trait SparkExecutorInfo extends Serializable
- class SparkFirehoseListener extends SparkListenerInterface
- trait SparkJobInfo extends Serializable
- trait SparkStageInfo extends Serializable
- class SparkStatusTracker extends AnyRef
- trait SparkThrowable extends AnyRef
- case class TaskCommitDenied(jobID: Int, partitionID: Int, attemptNumber: Int) extends TaskFailedReason with Product with Serializable
- abstract class TaskContext extends Serializable
- sealed trait TaskEndReason extends AnyRef
- sealed trait TaskFailedReason extends TaskEndReason
- case class TaskKilled(reason: String, accumUpdates: Seq[AccumulableInfo] = Seq.empty, accums: Seq[AccumulatorV2[_, _]] = Nil, metricPeaks: Seq[Long] = Seq.empty) extends TaskFailedReason with Product with Serializable
- class TaskKilledException extends RuntimeException
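
SparkConf and SparkContext are the configuration object and driver-side entry point listed above. A minimal sketch, assuming spark-core is on the classpath; the application name and master URL are placeholders, not values taken from this page:

import org.apache.spark.{SparkConf, SparkContext}

object ConfExample {
  def main(args: Array[String]): Unit = {
    // Placeholder app name and local master; a real deployment would set these differently.
    val conf = new SparkConf()
      .setAppName("package-overview-sketch")
      .setMaster("local[*]")

    val sc = new SparkContext(conf)
    try {
      // parallelize turns a local collection into an RDD; 4 partitions here.
      val rdd = sc.parallelize(1 to 100, 4)
      println(s"sum = ${rdd.sum()}")
    } finally {
      sc.stop() // always release the context
    }
  }
}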
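
Partitioner, HashPartitioner, and RangePartitioner control how the keys of a pair RDD are mapped to partitions. A small sketch, assuming a SparkContext named sc is already in scope:

import org.apache.spark.{HashPartitioner, RangePartitioner}

// A pair RDD keyed by String; `sc` is an existing SparkContext.
val pairs = sc.parallelize(Seq("a" -> 1, "b" -> 2, "c" -> 3, "a" -> 4))

// HashPartitioner routes each record by key.hashCode modulo the partition count.
val hashed = pairs.partitionBy(new HashPartitioner(8))

// RangePartitioner samples the keys and builds roughly balanced, ordered key ranges;
// it needs an Ordering for the key type (String has one).
val ranged = pairs.partitionBy(new RangePartitioner(8, pairs))

println(hashed.partitioner)       // Some(HashPartitioner)
println(ranged.getNumPartitions)  // may be fewer than requested for few distinct keys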
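
Aggregator bundles the three functions Spark uses for map-side combining: createCombiner, mergeValue, and mergeCombiners. RDD.combineByKey takes functions of the same shape, so a per-key average is an easy way to see their roles. A sketch, assuming a SparkContext named sc:

// `sc` is an existing SparkContext; C (the combiner type) is a (sum, count) pair.
val scores = sc.parallelize(Seq("a" -> 10.0, "a" -> 20.0, "b" -> 5.0))

val sumCounts = scores.combineByKey(
  (v: Double) => (v, 1L),                                              // createCombiner: V => C
  (acc: (Double, Long), v: Double) => (acc._1 + v, acc._2 + 1L),       // mergeValue: (C, V) => C
  (a: (Double, Long), b: (Double, Long)) => (a._1 + b._1, a._2 + b._2) // mergeCombiners: (C, C) => C
)

val averages = sumCounts.mapValues { case (sum, count) => sum / count }
averages.collect().foreach(println) // e.g. (a,15.0), (b,5.0)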
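
TaskContext (and its barrier-mode subclass BarrierTaskContext) exposes per-task information to code running inside a task. A sketch, assuming a SparkContext named sc; TaskContext.get() returns null on the driver, so it is only called inside mapPartitions here:

import org.apache.spark.TaskContext

// Tag each element with the partition and stage that processed it.
val tagged = sc.parallelize(1 to 10, 2).mapPartitions { iter =>
  val ctx = TaskContext.get() // the running task's context
  iter.map(x => (ctx.partitionId(), ctx.stageId(), x))
}
tagged.collect().foreach(println)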
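
FutureAction, SimpleFutureAction, and ComplexFutureAction represent asynchronously submitted jobs; the async RDD actions return them, and they behave like ordinary scala.concurrent.Future values that can also be cancelled. A sketch, assuming a SparkContext named sc:

import scala.concurrent.Await
import scala.concurrent.duration._

// countAsync submits the count job and returns immediately with a FutureAction[Long].
val pending = sc.parallelize(1 to 1000000).countAsync()

// Wait for the result like any Future (or call pending.cancel() to abort the job).
val n = Await.result(pending, 5.minutes)
println(s"counted $n elements")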