object DeltaTable extends Serializable
:: Evolving ::
Companion object to create DeltaTable instances.
DeltaTable.forPath(sparkSession, pathToTheDeltaTable)
- Since
0.3.0
- Alphabetic
- By Inheritance
- DeltaTable
- Serializable
- Serializable
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
convertToDelta(spark: SparkSession, identifier: String): DeltaTable
:: Evolving ::
Create a DeltaTable from the given parquet table. Takes an existing parquet table and constructs a delta transaction log in the base path of the table.
Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.
An Example would be
io.delta.tables.DeltaTable.convertToDelta( spark, "parquet.`/path`" )
- Annotations
- @Evolving()
- Since
0.4.0
-
def
convertToDelta(spark: SparkSession, identifier: String, partitionSchema: String): DeltaTable
:: Evolving ::
Create a DeltaTable from the given parquet table and partition schema. Takes an existing parquet table and constructs a delta transaction log in the base path of that table.
Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.
An example usage would be
io.delta.tables.DeltaTable.convertToDelta( spark, "parquet.`/path`", "key1 long, key2 string")
- Annotations
- @Evolving()
- Since
0.4.0
-
def
convertToDelta(spark: SparkSession, identifier: String, partitionSchema: StructType): DeltaTable
:: Evolving ::
Create a DeltaTable from the given parquet table and partition schema. Takes an existing parquet table and constructs a delta transaction log in the base path of that table.
Note: Any changes to the table during the conversion process may not result in a consistent state at the end of the conversion. Users should stop any changes to the table before the conversion is started.
An example usage would be
io.delta.tables.DeltaTable.convertToDelta( spark, "parquet.`/path`", new StructType().add(StructField("key1", LongType)).add(StructField("key2", StringType)))
- Annotations
- @Evolving()
- Since
0.4.0
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
forName(sparkSession: SparkSession, tableName: String): DeltaTable
Create a DeltaTable using the given table or view name using the given SparkSession.
-
def
forName(tableOrViewName: String): DeltaTable
Create a DeltaTable using the given table or view name, using the active SparkSession.
Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is,
SparkSession.getActiveSession()
is empty.
-
def
forPath(sparkSession: SparkSession, path: String): DeltaTable
:: Evolving ::
Create a DeltaTable for the data at the given
path
using the given SparkSession.
- Annotations
- @Evolving()
- Since
0.3.0
-
def
forPath(path: String): DeltaTable
:: Evolving ::
Create a DeltaTable for the data at the given
path
. Note: This uses the active SparkSession in the current thread to read the table data. Hence, this throws an error if the active SparkSession has not been set, that is,
SparkSession.getActiveSession()
is empty.
- Annotations
- @Evolving()
- Since
0.3.0
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
isDeltaTable(identifier: String): Boolean
:: Evolving ::
Check if the provided
identifier
string, in this case a file path, is the root of a Delta table. Note: This uses the active SparkSession in the current thread to search for the table. Hence, this throws an error if the active SparkSession has not been set, that is,
SparkSession.getActiveSession()
is empty.
An example would be
DeltaTable.isDeltaTable("/path/to/table")
- Annotations
- @Evolving()
- Since
0.4.0
-
def
isDeltaTable(sparkSession: SparkSession, identifier: String): Boolean
:: Evolving ::
Check if the provided
identifier
string, in this case a file path, is the root of a Delta table, using the given SparkSession. An example would be
DeltaTable.isDeltaTable(spark, "path/to/table")
- Annotations
- @Evolving()
- Since
0.4.0
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()