abstract class Feature[Serializable1, Serializable2, TComplete] extends Serializable

Linear Supertypes
Serializable, Serializable, AnyRef, Any

Instance Constructors

  1. new Feature(model: HasFeatures, name: String)(implicit arg0: ClassTag[TComplete])

Abstract Value Members

  1. abstract def deserializeDataset(spark: SparkSession, path: String, field: String): Option[_]
    Attributes
    protected
  2. abstract def deserializeObject(spark: SparkSession, path: String, field: String): Option[_]
    Attributes
    protected
  3. abstract def serializeDataset(spark: SparkSession, path: String, field: String, value: TComplete): Unit
    Attributes
    protected
  4. abstract def serializeObject(spark: SparkSession, path: String, field: String, value: TComplete): Unit
    Attributes
    protected
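
The four abstract members above define the serialization contract a concrete feature must implement. The following is a minimal, hypothetical sketch of such a subclass, not part of the library: the name SingleValueFeature, the one-element object-file layout, and the Kryo encoding of the dataset variant are assumptions made for illustration; Feature, HasFeatures, getFieldPath and deserializeWithFallback are the members documented on this page (their imports are omitted).

    import scala.reflect.ClassTag
    import org.apache.hadoop.fs.FileSystem
    import org.apache.spark.sql.{Encoder, Encoders, SparkSession}

    // Hypothetical concrete feature holding a single value of type TValue.
    class SingleValueFeature[TValue: ClassTag](model: HasFeatures, override val name: String)
        extends Feature[TValue, TValue, TValue](model, name) {

      override protected def serializeObject(
          spark: SparkSession, path: String, field: String, value: TValue): Unit = {
        val dataPath = getFieldPath(path, field)
        // Persist the single value as a one-element object file.
        spark.sparkContext.parallelize(Seq(value), numSlices = 1)
          .saveAsObjectFile(dataPath.toString)
      }

      override protected def deserializeObject(
          spark: SparkSession, path: String, field: String): Option[TValue] = {
        val dataPath = getFieldPath(path, field)
        val fs = FileSystem.get(spark.sparkContext.hadoopConfiguration)
        if (fs.exists(dataPath))
          // deserializeWithFallback tolerates serialVersionUID mismatches.
          deserializeWithFallback[TValue](spark, dataPath.toString).collect().headOption
        else None
      }

      override protected def serializeDataset(
          spark: SparkSession, path: String, field: String, value: TValue): Unit = {
        val dataPath = getFieldPath(path, field)
        implicit val encoder: Encoder[TValue] = Encoders.kryo[TValue] // assumption: Kryo-encode the value
        spark.createDataset(Seq(value)).write.mode("overwrite").parquet(dataPath.toString)
      }

      override protected def deserializeDataset(
          spark: SparkSession, path: String, field: String): Option[TValue] = {
        val dataPath = getFieldPath(path, field)
        val fs = FileSystem.get(spark.sparkContext.hadoopConfiguration)
        implicit val encoder: Encoder[TValue] = Encoders.kryo[TValue]
        if (fs.exists(dataPath))
          spark.read.parquet(dataPath.toString).as[TValue].collect().headOption
        else None
      }
    }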

Concrete Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. final var broadcastValue: Option[Broadcast[TComplete]]
    Attributes
    protected
  6. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  7. final def deserialize(spark: SparkSession, path: String, field: String): Option[_]
  8. def deserializeWithFallback[ObjectType](spark: SparkSession, path: String)(implicit arg0: ClassTag[ObjectType]): RDD[ObjectType]

    Loads an object from a SequenceFile containing serialized objects. It tries to load the tuple across Scala versions, handling serialVersionUID mismatches. A sketch of this approach appears after the member list below.

    Adapted from sparkContext.objectFile:

    "Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and BytesWritable values that contain a serialized partition."

    path

    directory of the input data files; the path can be a comma-separated list of paths

    returns

    RDD representing deserialized data from the file(s)

    Attributes
    protected
  9. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  10. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  11. final var fallbackLazyValue: Option[() ⇒ TComplete]
    Attributes
    protected
  12. final var fallbackRawValue: Option[TComplete]
    Attributes
    protected
  13. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  14. final def get: Option[TComplete]
  15. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  16. final def getFieldPath(path: String, field: String): Path
    Attributes
    protected
  17. final def getOrDefault: TComplete
  18. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  19. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  20. final var isProtected: Boolean
    Attributes
    protected
  21. final def isSet: Boolean
  22. lazy val logger: Logger
    Attributes
    protected
  23. val name: String
  24. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  25. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  26. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  27. final def orDefault: Option[TComplete]
  28. final var rawValue: Option[TComplete]
    Attributes
    protected
  29. def resolveCustomLegacyClasses(osc: ObjectStreamClass): ObjectStreamClass

    Method to provide additional legacy class descriptor mappings for custom classes. If no mapping is found, it should return null. A sketch of a possible override appears after the member list below.

    osc

    The ObjectStreamClass to resolve

    returns

    The resolved ObjectStreamClass, or null if not found

    Attributes
    protected
  30. val serializationMode: String
  31. final def serialize(spark: SparkSession, path: String, field: String, value: TComplete): Unit
  32. final def serializeInfer(spark: SparkSession, path: String, field: String, value: Any): Unit
  33. def setFallback(v: Option[() ⇒ TComplete]): HasFeatures
  34. final def setProtected(): Feature.this.type

    Sets this feature to be protected and only settable once. A usage sketch appears after the member list below.

    returns

    This Feature

  35. final def setValue(value: Option[Any]): HasFeatures
  36. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  37. def toString(): String
    Definition Classes
    AnyRef → Any
  38. val useBroadcast: Boolean
  39. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  40. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  41. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
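
The deserializeWithFallback entry (member 8) describes the sparkContext.objectFile recipe with a more tolerant deserializer. The sketch below, referenced from that entry, is a hypothetical stand-alone illustration of the approach rather than the library's implementation; objectFileWithFallback is an assumed name, and the descriptor remapping is left as a comment where a hook such as resolveCustomLegacyClasses (member 29) would plug in.

    import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectStreamClass}
    import scala.reflect.ClassTag
    import org.apache.hadoop.io.{BytesWritable, NullWritable}
    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.SparkSession

    // Hypothetical sketch: read the SequenceFile as (NullWritable, BytesWritable) pairs,
    // each value holding a serialized partition (an Array[T]), and deserialize it with an
    // ObjectInputStream whose class descriptors can be remapped, so serialVersionUID
    // mismatches across Scala versions do not abort the load.
    def objectFileWithFallback[T: ClassTag](spark: SparkSession, path: String): RDD[T] =
      spark.sparkContext
        .sequenceFile(path, classOf[NullWritable], classOf[BytesWritable])
        .flatMap { case (_, bytes) =>
          val in = new ObjectInputStream(new ByteArrayInputStream(bytes.getBytes)) {
            override def readClassDescriptor(): ObjectStreamClass = {
              val descriptor = super.readClassDescriptor()
              // A real implementation would substitute legacy descriptors here.
              descriptor
            }
          }
          try in.readObject().asInstanceOf[Array[T]]
          finally in.close()
        }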
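
For resolveCustomLegacyClasses (member 29), the following is a hedged sketch of a possible override inside a Feature subclass; the legacy and replacement class names are hypothetical placeholders.

    import java.io.ObjectStreamClass

    // Hypothetical override: remap one renamed legacy class to its current definition;
    // any descriptor without a mapping yields null, as the contract requires.
    override protected def resolveCustomLegacyClasses(osc: ObjectStreamClass): ObjectStreamClass =
      osc.getName match {
        case "com.example.legacy.OldEmbeddingsRef" =>                          // hypothetical legacy name
          ObjectStreamClass.lookup(Class.forName("com.example.EmbeddingsRef"))  // hypothetical replacement
        case _ => null
      }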
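
For setProtected (member 34) and setValue (member 35), a short hypothetical usage sketch, assuming a model that mixes in HasFeatures and the SingleValueFeature subclass sketched after the abstract members above.

    // Inside a model that mixes in HasFeatures (hypothetical):
    val vocabulary: SingleValueFeature[Map[String, Int]] =
      new SingleValueFeature[Map[String, Int]](this, "vocabulary").setProtected()

    // First assignment to the protected feature; once it holds a value it is not reassigned.
    vocabulary.setValue(Some(Map("token" -> 0)))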
