Packages

object ClusterUtil

Linear Supertypes
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. ClusterUtil
  2. AnyRef
  3. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  6. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  7. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  8. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  9. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  10. def getDefaultNumExecutorCores(spark: SparkSession, log: Logger, master: Option[String] = None): Int

    Get number of default cores from sparkSession(required) or master(optional) for 1 executor.

    Get number of default cores from sparkSession(required) or master(optional) for 1 executor.

    spark

    The current spark session. If the master parameter is not set, the master in the spark session is used.

    master

    This parameter is needed for unit testing. If set, the function returns the value based on it; if not set, the master in the SparkSession is used.

    returns

    The number of default cores per executor based on master.

  11. def getDriverHost(spark: SparkSession): String
  12. def getExecutors(spark: SparkSession): Array[(Int, String)]

    Returns a list of executor id and host.

    Returns a list of executor id and host.

    spark

    The current spark session.

    returns

    List of executors as an array of (id,host).

  13. def getHostToIP(hostname: String): String
  14. def getJVMCPUs(spark: SparkSession): Int
  15. def getNumExecutorTasks(spark: SparkSession, numTasksPerExec: Int, log: Logger): Int

    Returns the number of executors * number of tasks.

    Returns the number of executors * number of tasks.

    numTasksPerExec

    The number of tasks per executor.

    returns

    The number of executors * number of tasks.

  16. def getNumTasksPerExecutor(dataset: Dataset[_], log: Logger): Int

    Get number of tasks from dummy dataset for 1 executor.

    Get number of tasks from dummy dataset for 1 executor. Note: all executors have same number of cores, and this is more reliable than getting value from conf.

    dataset

    The dataset containing the current spark session.

    returns

    The number of tasks per executor.

  17. def getTaskCpus(dataset: Dataset[_], log: Logger): Int
  18. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  19. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  20. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  21. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  22. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  23. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  24. def toString(): String
    Definition Classes
    AnyRef → Any
  25. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  26. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  27. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()

Inherited from AnyRef

Inherited from Any

Ungrouped