Commit 6d04523

fix: Spark table format vs file format (#1644)
barakalon authored May 18, 2023
1 parent f93f5cf commit 6d04523
Showing 3 changed files with 10 additions and 5 deletions.
2 changes: 0 additions & 2 deletions sqlglot/dialects/hive.py
@@ -348,7 +348,6 @@ class Generator(generator.Generator):
 exp.StrToTime: _str_to_time_sql,
 exp.StrToUnix: _str_to_unix_sql,
 exp.StructExtract: struct_extract_sql,
-exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
 exp.TimeStrToDate: rename_func("TO_DATE"),
 exp.TimeStrToTime: timestrtotime_sql,
 exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
@@ -376,7 +375,6 @@ class Generator(generator.Generator):
 **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
 exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
-exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
 }

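To see the effect of dropping TableFormatProperty from the Hive generator, here is a minimal sketch (assuming the sqlglot version at this commit): Spark's USING clause now flows through FileFormatProperty, which Hive renders as STORED AS.

import sqlglot

# With the Hive TableFormatProperty transform removed, a Spark `USING <format>`
# clause is handled through FileFormatProperty and rendered as STORED AS.
# Output shown per the updated test below.
print(
    sqlglot.transpile(
        "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
        read="spark",
        write="hive",
    )[0]
)
# CREATE TABLE x STORED AS ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'
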
2 changes: 1 addition & 1 deletion sqlglot/parser.py
@@ -640,7 +640,7 @@ class Parser(metaclass=_Parser):
 "TEMPORARY": lambda self: self._parse_temporary(global_=False),
 "TRANSIENT": lambda self: self.expression(exp.TransientProperty),
 "TTL": lambda self: self._parse_ttl(),
-"USING": lambda self: self._parse_property_assignment(exp.TableFormatProperty),
+"USING": lambda self: self._parse_property_assignment(exp.FileFormatProperty),
 "VOLATILE": lambda self: self._parse_volatile_property(),
 "WITH": lambda self: self._parse_with_property(),
 }
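
A quick sketch of the parser-side change (assuming this commit's sqlglot): the USING property handler now builds an exp.FileFormatProperty node, the same node that Hive's STORED AS and Presto's FORMAT map to.

import sqlglot
from sqlglot import exp

# Parse a Spark CREATE TABLE and confirm USING now yields FileFormatProperty.
create = sqlglot.parse_one("CREATE TABLE x USING ICEBERG", read="spark")
prop = create.find(exp.FileFormatProperty)
print(type(prop).__name__)  # FileFormatProperty
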
11 changes: 9 additions & 2 deletions tests/dialects/test_spark.py
@@ -39,8 +39,8 @@ def test_ddl(self):
 "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
 write={
 "duckdb": "CREATE TABLE x",
-"presto": "CREATE TABLE x WITH (TABLE_FORMAT='ICEBERG', PARTITIONED_BY=ARRAY['MONTHS'])",
-"hive": "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
+"presto": "CREATE TABLE x WITH (FORMAT='ICEBERG', PARTITIONED_BY=ARRAY['MONTHS'])",
+"hive": "CREATE TABLE x STORED AS ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
 "spark": "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
 },
 )
@@ -113,6 +113,13 @@ def test_ddl(self):
 "spark": "ALTER TABLE StudentInfo DROP COLUMNS (LastName, DOB)",
 },
 )
+self.validate_all(
+"CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
+identify=True,
+write={
+"spark": "CREATE TABLE `x` USING ICEBERG PARTITIONED BY (MONTHS(`y`)) LOCATION 's3://z'",
+},
+)

 def test_to_date(self):
 self.validate_all(
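
The updated expectations above can be reproduced directly with transpile (a sketch, assuming this commit's sqlglot; outputs taken from the updated test).

import sqlglot

sql = "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'"

print(sqlglot.transpile(sql, read="spark", write="presto")[0])
# CREATE TABLE x WITH (FORMAT='ICEBERG', PARTITIONED_BY=ARRAY['MONTHS'])

# identify=True quotes identifiers in the generated SQL, as in the new test case.
print(sqlglot.transpile(sql, read="spark", write="spark", identify=True)[0])
# CREATE TABLE `x` USING ICEBERG PARTITIONED BY (MONTHS(`y`)) LOCATION 's3://z'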
