-
Notifications
You must be signed in to change notification settings - Fork 347
Closed
Description
How can I use the PostgreSQL JDBC driver with spark-redshift?
The following code gives me this exception:
java.lang.IllegalArgumentException: Unsupported JDBC protocol: 'postgresql'
val df1: DataFrame = sqlContext.read
.format("com.databricks.spark.redshift")
.option("url", "jdbc:postgresql://host:5439/db?user=test&password=test")
.option("dbtable", "wdata")
.option("tempdir", "s3n://accessKEy:SecretKEy@redshift/dir/")
.load()
df1.show()

The full stack trace is as follows:
Exception in thread "main" java.lang.IllegalArgumentException: Unsupported JDBC protocol: 'postgresql'
at com.databricks.spark.redshift.JDBCWrapper$$anonfun$getDriverClass$2.apply(RedshiftJDBCWrapper.scala:68)
at com.databricks.spark.redshift.JDBCWrapper$$anonfun$getDriverClass$2.apply(RedshiftJDBCWrapper.scala:52)
at scala.Option.getOrElse(Option.scala:120)
at com.databricks.spark.redshift.JDBCWrapper.getDriverClass(RedshiftJDBCWrapper.scala:51)
at com.databricks.spark.redshift.JDBCWrapper.getConnector(RedshiftJDBCWrapper.scala:138)
at com.databricks.spark.redshift.RedshiftRelation$$anonfun$schema$1.apply(RedshiftRelation.scala:59)
at com.databricks.spark.redshift.RedshiftRelation$$anonfun$schema$1.apply(RedshiftRelation.scala:56)
at scala.Option.getOrElse(Option.scala:120)
at com.databricks.spark.redshift.RedshiftRelation.schema$lzycompute(RedshiftRelation.scala:56)
at com.databricks.spark.redshift.RedshiftRelation.schema(RedshiftRelation.scala:55)
at org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(LogicalRelation.scala:31)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:120)