-
Notifications
You must be signed in to change notification settings - Fork 290
/
DateTime.scala
72 lines (62 loc) · 1.82 KB
/
DateTime.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
package sql
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types._
import org.apache.spark.{SparkConf, SparkContext}
//
// One way to create a DataFrame containing dates and timestamps, and
// query for ranges thereof.
//
// Demonstrates creating a DataFrame with DateType and TimestampType columns
// and querying date/timestamp ranges through Spark SQL.
object DateTime {
  // Entry point: builds a small three-row table of dates and timestamps,
  // registers it as a temp view, and runs two range queries against it.
  // Fixed: procedure syntax (deprecated; removed in Scala 3) -> ": Unit =".
  def main(args: Array[String]): Unit = {
    val spark =
      SparkSession.builder()
        .appName("SQL-DateTime")
        .master("local[4]")
        .getOrCreate()

    // Explicit schema: integer id plus one DATE and one TIMESTAMP column,
    // all nullable.
    val schema = StructType(
      Seq(
        StructField("id", IntegerType, true),
        StructField("dt", DateType, true),
        StructField("ts", TimestampType, true)
      )
    )

    // Sample rows: java.sql.Date / java.sql.Timestamp are the JVM types
    // Spark maps to DateType / TimestampType. Spread across 4 partitions.
    val rows = spark.sparkContext.parallelize(
      Seq(
        Row(
          1,
          Date.valueOf("2000-01-11"),
          Timestamp.valueOf("2011-10-02 09:48:05.123456")
        ),
        Row(
          1,
          Date.valueOf("2004-04-14"),
          Timestamp.valueOf("2011-10-02 12:30:00.123456")
        ),
        Row(
          1,
          Date.valueOf("2008-12-31"),
          Timestamp.valueOf("2011-10-02 15:00:00.123456")
        )
      ), 4)

    val tdf = spark.createDataFrame(rows, schema)
    tdf.printSchema()
    tdf.createOrReplaceTempView("dates_times")

    println("*** Here's the whole table")
    spark.sql("SELECT * FROM dates_times").show()

    println("*** Query for a date range")
    // CAST the string literals to DATE so the comparison is date-typed.
    // Note: plain """ (not s"""): nothing is interpolated, and the s-prefix
    // would misfire on any literal '$' in the SQL.
    spark.sql(
      """
        | SELECT * FROM dates_times
        | WHERE dt > cast('2002-01-01' as date)
        | AND dt < cast('2006-01-01' as date)
      """.stripMargin).show()

    println("*** Query to skip a timestamp range")
    spark.sql(
      """
        | SELECT * FROM dates_times
        | WHERE ts < cast('2011-10-02 12:00:00' as timestamp)
        | OR ts > cast('2011-10-02 13:00:00' as timestamp)
      """.stripMargin).show()

    // Fixed: release the SparkSession/SparkContext so the local JVM
    // shuts down cleanly instead of leaking the session at exit.
    spark.stop()
  }
}