import sqlContext.implicits._
import org.apache.spark.sql.functions.expr

// Sample data: an int column and a string column.
val df = Seq(
  (1, "First Value"),
  (2, "Second Value")
).toDF("int_column", "string_column")

// `unix_millis` is a SQL-only function until Spark 3.5 (it exists in the SQL
// engine from 3.1.1 but is not a member of org.apache.spark.sql.functions),
// so importing it from `functions` fails to compile. Invoke it through
// expr() / selectExpr instead, which routes through the SQL parser.
// NOTE: unix_millis requires a TIMESTAMP argument, so the int column is cast
// first (seconds since epoch -> timestamp -> epoch milliseconds).
df.select(expr("unix_millis(CAST(int_column AS TIMESTAMP))")).show()
command-1862359:7: error: value unix_millis is not a member of object org.apache.spark.sql.functions
import org.apache.spark.sql.functions.{unix_millis}
^
command-1862359:9: error: not found: value unix_millis
df.select(unix_millis(col("int_column"))).show()
^
import org.apache.spark.sql.functions._
import sqlContext.implicits._

// Build a small two-row DataFrame with an int column and a string column.
val jdf =
  Seq(1 -> "First Value", 2 -> "Second Value")
    .toDF("int_column", "string_column")
import org.apache.spark.sql.functions._
import sqlContext.implicits._
jdf: org.apache.spark.sql.DataFrame = [int_column: int, string_column: string]
import org.apache.spark.sql.functions._
import sqlContext.implicits._

// Same two-row fixture as above: (int_column, string_column).
val rows = Seq(
  (1, "First Value"),
  (2, "Second Value")
)
val ldf = rows.toDF("int_column", "string_column")
import org.apache.spark.sql.functions._
import sqlContext.implicits._
ldf: org.apache.spark.sql.DataFrame = [int_column: int, string_column: string]
The functions `timestamp_millis` and `unix_millis` are not available in the
Scala/DataFrame API (`org.apache.spark.sql.functions`). They are SQL-only
functions, available in Spark 3.1.1 and above, so they must be invoked through
SQL — e.g. via `expr(...)`, `selectExpr(...)`, or `spark.sql(...)`.