@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.io.{HiveFileFormatUtils, HiveOutputFormat}
 import org.apache.hadoop.hive.ql.plan.{PlanUtils, TableDesc}
 import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapred._
+import org.apache.hadoop.hive.common.FileUtils

 import org.apache.spark.mapred.SparkHadoopMapRedUtil
 import org.apache.spark.sql.Row
@@ -212,9 +213,14 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
       .zip(row.toSeq.takeRight(dynamicPartColNames.length))
       .map { case (col, rawVal) =>
         val string = if (rawVal == null) null else String.valueOf(rawVal)
-        s"/$col=${if (string == null || string.isEmpty) defaultPartName else string}"
-      }
-      .mkString
+        val colString =
+          if (string == null || string.isEmpty) {
+            defaultPartName
+          } else {
+            FileUtils.escapePathName(string)
+          }
+        s"/$col=$colString"
+      }.mkString

     def newWriter = {
       val newFileSinkDesc = new FileSinkDesc(

Review comment (Contributor), on the added colString block: Can you move this computation out of string interpolation into a variable? It's kind of odd to have a multi-line string that's not really multi-line.
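For reference, here is a minimal standalone sketch of what the patched writer now does per dynamic-partition column: null or empty values fall back to the default partition name, and everything else goes through Hive's FileUtils.escapePathName so characters such as ':' are percent-encoded before they end up in the partition path. The object name, the partitionSegment helper, and the hard-coded default partition name below are illustrative only, not Spark source (Spark reads the default from the job configuration).

import org.apache.hadoop.hive.common.FileUtils

// Standalone sketch, not Spark source: mirrors how the patched writer builds one
// "/col=value" segment of a dynamic-partition path.
object PartitionPathSketch {
  // Hive's usual fallback for null/empty partition values; in Spark this comes from
  // the job configuration (hive.exec.default.partition.name), hard-coded here.
  val defaultPartName = "__HIVE_DEFAULT_PARTITION__"

  def partitionSegment(col: String, rawVal: Any): String = {
    val string = if (rawVal == null) null else String.valueOf(rawVal)
    val colString =
      if (string == null || string.isEmpty) {
        defaultPartName
      } else {
        FileUtils.escapePathName(string) // percent-encodes characters such as ':'
      }
    s"/$col=$colString"
  }

  def main(args: Array[String]): Unit = {
    println(partitionSegment("ts", "2011-01-11+14:18:26")) // ':' gets escaped, path-safe
    println(partitionSegment("ts", null))                  // /ts=__HIVE_DEFAULT_PARTITION__
  }
}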
@@ -854,6 +854,22 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
     }
   }

+  test("SPARK-5592: get java.net.URISyntaxException when dynamic partitioning") {
+    sql("""
+      |create table sc as select *
+      |from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
+      |union all
+      |select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
+      |union all
+      |select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
+    """.stripMargin)
+    sql("create table sc_part (key string) partitioned by (ts string) stored as rcfile")
+    sql("set hive.exec.dynamic.partition=true")
+    sql("set hive.exec.dynamic.partition.mode=nonstrict")
+    sql("insert overwrite table sc_part partition(ts) select * from sc")
+    sql("drop table sc_part")
+  }
+
   test("Partition spec validation") {
     sql("DROP TABLE IF EXISTS dp_test")
     sql("CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)")
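The new test drives the scenario from the JIRA title: dynamic partition values such as '2011-01-11+15:18:26' contain ':' and previously failed when the writer turned the unescaped partition spec into a Hadoop path. Below is a rough standalone illustration of that underlying failure and of the escaped form the patch produces. It is not part of the PR; the object name and warehouse path are made up, and it assumes Hadoop's Path parses a URI scheme out of the colon-containing child string, which is the URISyntaxException reported in SPARK-5592.

import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.common.FileUtils

// Standalone sketch, not part of the PR: shows the failure the test guards against.
object Spark5592Sketch {
  def main(args: Array[String]): Unit = {
    val tableDir = new Path("/tmp/warehouse/sc_part") // made-up warehouse location

    // Unescaped: Path treats the text before the first ':' as a URI scheme, and the
    // constructor fails with IllegalArgumentException wrapping java.net.URISyntaxException
    // ("Relative path in absolute URI").
    try {
      new Path(tableDir, "ts=2011-01-11+15:18:26")
    } catch {
      case e: IllegalArgumentException => println(s"unescaped partition spec failed: $e")
    }

    // Escaped with the same Hive helper the patch uses: a legal child path component.
    val escaped = "ts=" + FileUtils.escapePathName("2011-01-11+15:18:26")
    println(new Path(tableDir, escaped))
  }
}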