object DeltaTable extends Serializable

Companion object to create DeltaTable instances.

DeltaTable.forPath(sparkSession, pathToTheDeltaTable)
Since

0.3.0

Linear Supertypes
Serializable, Serializable, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. DeltaTable
  2. Serializable
  3. Serializable
  4. AnyRef
  5. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  6. def columnBuilder(spark: SparkSession, colName: String): DeltaColumnBuilder

    :: Evolving ::

    Return an instance of DeltaColumnBuilder to specify a column. Refer to DeltaTableBuilder for examples and to DeltaColumnBuilder for the detailed APIs.

    spark

    the SparkSession passed by the user

    colName

    the column name
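
    For example, a minimal sketch of building a column and passing it to DeltaTableBuilder (assuming an existing SparkSession named spark; the table name, column names, and generated-column expression are illustrative):

    io.delta.tables.DeltaTable.create(spark)
      .tableName("events")
      .addColumn("eventTime", "TIMESTAMP")
      .addColumn(
        io.delta.tables.DeltaTable.columnBuilder(spark, "eventDate")
          .dataType("DATE")
          .generatedAlwaysAs("CAST(eventTime AS DATE)")  // eventDate is derived from eventTime
          .build())
      .execute()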

    Annotations
    @Evolving()
    Since

    1.0.0

  7. def columnBuilder(colName: String): DeltaColumnBuilder

    :: Evolving ::

    Return an instance of DeltaColumnBuilder to specify a column. Refer to DeltaTableBuilder for examples and to DeltaColumnBuilder for the detailed APIs.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    colName

    the column name

    Annotations
    @Evolving()
    Since

    1.0.0

  8. def convertToDelta(spark: SparkSession, identifier: String): DeltaTable

    Create a DeltaTable from the given parquet table. Takes an existing parquet table and constructs a delta transaction log in the base path of the table.

    Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.

    An example usage would be

    io.delta.tables.DeltaTable.convertToDelta(
     spark,
     "parquet.`/path`")
    Since

    0.4.0

  9. def convertToDelta(spark: SparkSession, identifier: String, partitionSchema: String): DeltaTable

    Create a DeltaTable from the given parquet table and partition schema. Takes an existing parquet table and constructs a delta transaction log in the base path of that table.

    Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.

    An example usage would be

    io.delta.tables.DeltaTable.convertToDelta(
     spark,
     "parquet.`/path`",
     "key1 long, key2 string")
    Since

    0.4.0

  10. def convertToDelta(spark: SparkSession, identifier: String, partitionSchema: StructType): DeltaTable

    Create a DeltaTable from the given parquet table and partition schema. Takes an existing parquet table and constructs a delta transaction log in the base path of that table.

    Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.

    An example usage would be

    io.delta.tables.DeltaTable.convertToDelta(
     spark,
     "parquet.`/path`",
     new StructType().add(StructField("key1", LongType)).add(StructField("key2", StringType)))
    Since

    0.4.0

  11. def create(spark: SparkSession): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to create a Delta table, erroring if the table already exists (the same as SQL CREATE TABLE). Refer to DeltaTableBuilder for more details.

    spark

    the SparkSession passed by the user
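
    A minimal usage sketch (assuming an existing SparkSession named spark; the table and column names are illustrative):

    io.delta.tables.DeltaTable.create(spark)
      .tableName("events")        // errors if "events" already exists
      .addColumn("id", "BIGINT")
      .addColumn("eventTime", "TIMESTAMP")
      .execute()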

    Annotations
    @Evolving()
    Since

    1.0.0

  12. def create(): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to create a Delta table, erroring if the table already exists (the same as SQL CREATE TABLE). Refer to DeltaTableBuilder for more details.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    Annotations
    @Evolving()
    Since

    1.0.0

  13. def createIfNotExists(spark: SparkSession): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to create a Delta table if it does not exist (the same as SQL CREATE TABLE IF NOT EXISTS). Refer to DeltaTableBuilder for more details.

    spark

    the SparkSession passed by the user
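
    A minimal usage sketch for a path-based table (assuming an existing SparkSession named spark; the path and column name are illustrative):

    io.delta.tables.DeltaTable.createIfNotExists(spark)
      .location("/tmp/delta/events")   // no-op if a Delta table already exists at this path
      .addColumn("id", "BIGINT")
      .execute()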

    Annotations
    @Evolving()
    Since

    1.0.0

  14. def createIfNotExists(): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to create a Delta table if it does not exist (the same as SQL CREATE TABLE IF NOT EXISTS). Refer to DeltaTableBuilder for more details.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    Annotations
    @Evolving()
    Since

    1.0.0

  15. def createOrReplace(spark: SparkSession): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to replace a Delta table, or create it if it does not exist (the same as SQL CREATE OR REPLACE TABLE). Refer to DeltaTableBuilder for more details.

    spark

    the SparkSession passed by the user
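
    A minimal usage sketch (assuming an existing SparkSession named spark; the table name, column names, and partition column are illustrative):

    io.delta.tables.DeltaTable.createOrReplace(spark)
      .tableName("events")        // replaces the table definition if it already exists
      .addColumn("id", "BIGINT")
      .addColumn("date", "DATE")
      .partitionedBy("date")
      .execute()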

    Annotations
    @Evolving()
    Since

    1.0.0

  16. def createOrReplace(): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to replace a Delta table, or create it if it does not exist (the same as SQL CREATE OR REPLACE TABLE). Refer to DeltaTableBuilder for more details.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    Annotations
    @Evolving()
    Since

    1.0.0

  17. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  18. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  19. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  20. def forName(sparkSession: SparkSession, tableName: String): DeltaTable

    Instantiate a DeltaTable object using the given table name and the given SparkSession. If the given tableName is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error. Note: Passing a view name will also result in this error, as views are not supported.

    The given tableName can also be the absolute path of a delta datasource (i.e. delta.`path`); if so, instantiate a DeltaTable object representing the data at the given path (consistent with forPath).
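
    A minimal usage sketch (assuming an existing SparkSession named spark; the table name is illustrative):

    val deltaTable = io.delta.tables.DeltaTable.forName(spark, "events")
    deltaTable.toDF.show()   // read the table contents as a DataFrame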

  21. def forName(tableOrViewName: String): DeltaTable

    Instantiate a DeltaTable object using the given table name. If the given tableOrViewName is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error. Note: Passing a view name will also result in this error, as views are not supported.

    The given tableOrViewName can also be the absolute path of a delta datasource (i.e. delta.`path`); if so, instantiate a DeltaTable object representing the data at the given path (consistent with forPath).

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

  22. def forPath(sparkSession: SparkSession, path: String, hadoopConf: java.util.Map[String, String]): DeltaTable

    Java friendly API to instantiate a DeltaTable object representing the data at the given path. If the given path is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error.

    hadoopConf

    Hadoop configurations starting with "fs." or "dfs." will be picked up by DeltaTable to access the file system when executing queries. Other configurations will be ignored.

    val hadoopConf = Map(
      "fs.s3a.access.key" -> "<access-key>",
      "fs.s3a.secret.key" -> "<secret-key>"
    )
    DeltaTable.forPath(spark, "/path/to/table", hadoopConf)
    Since

    2.2.0

  23. def forPath(sparkSession: SparkSession, path: String, hadoopConf: Map[String, String]): DeltaTable

    Instantiate a DeltaTable object representing the data at the given path. If the given path is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error.

    hadoopConf

    Hadoop configurations starting with "fs." or "dfs." will be picked up by DeltaTable to access the file system when executing queries. Other configurations will not be allowed.

    val hadoopConf = Map(
      "fs.s3a.access.key" -> "<access-key>",
      "fs.s3a.secret.key" -> "<secret-key>"
    )
    DeltaTable.forPath(spark, "/path/to/table", hadoopConf)
    Since

    2.2.0

  24. def forPath(sparkSession: SparkSession, path: String): DeltaTable

    Instantiate a DeltaTable object representing the data at the given path. If the given path is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error.
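
    A minimal usage sketch (assuming an existing SparkSession named spark; the path and delete condition are illustrative):

    val deltaTable = io.delta.tables.DeltaTable.forPath(spark, "/tmp/delta/events")
    deltaTable.delete("eventTime < '2020-01-01'")   // delete old rows from the table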

    Since

    0.3.0

  25. def forPath(path: String): DeltaTable

    Instantiate a DeltaTable object representing the data at the given path. If the given path is invalid (i.e. either no table exists or an existing table is not a Delta table), it throws a "not a Delta table" error.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    Since

    0.3.0

  26. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  27. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  28. def isDeltaTable(identifier: String): Boolean

    Check if the provided identifier string, in this case a file path, is the root of a Delta table.

    Note: This uses the active SparkSession in the current thread to search for the table. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    An example would be

    DeltaTable.isDeltaTable("/path/to/table")
    Since

    0.4.0

  29. def isDeltaTable(sparkSession: SparkSession, identifier: String): Boolean

    Check if the provided identifier string, in this case a file path, is the root of a Delta table using the given SparkSession.

    An example would be

    DeltaTable.isDeltaTable(spark, "/path/to/table")
    Since

    0.4.0

  30. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  31. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  32. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  33. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  34. def replace(spark: SparkSession): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to replace a Delta table, erroring if the table doesn't exist (the same as SQL REPLACE TABLE). Refer to DeltaTableBuilder for more details.

    spark

    the SparkSession passed by the user
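
    A minimal usage sketch (assuming an existing SparkSession named spark; the table name, comment, and column names are illustrative):

    io.delta.tables.DeltaTable.replace(spark)
      .tableName("events")        // errors if "events" does not already exist
      .comment("rebuilt events table")
      .addColumn("id", "BIGINT")
      .addColumn("eventTime", "TIMESTAMP")
      .execute()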

    Annotations
    @Evolving()
    Since

    1.0.0

  35. def replace(): DeltaTableBuilder

    :: Evolving ::

    Return an instance of DeltaTableBuilder to replace a Delta table, erroring if the table doesn't exist (the same as SQL REPLACE TABLE). Refer to DeltaTableBuilder for more details.

    Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is, if SparkSession.getActiveSession() is empty.

    Annotations
    @Evolving()
    Since

    1.0.0

  36. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  37. def toString(): String
    Definition Classes
    AnyRef → Any
  38. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  39. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  40. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
