2 files changed: 7 additions, 9 deletions

Changed files:
- external/docker-integration-tests/pom.xml
- external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
external/docker-integration-tests/pom.xml
@@ -80,6 +80,13 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-sql_${scala.binary.version}</artifactId>
external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -255,15 +255,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext
     val df = dfRead.filter(dfRead.col("date_type").lt(dt))
       .filter(dfRead.col("timestamp_type").lt(ts))

-    val metadata = df.queryExecution.sparkPlan.metadata
-    // The "PushedFilters" entry should exist in the DataFrame's
-    // physical plan, and the presence of the right literals in
-    // "PushedFilters" is used to prove that the predicate
-    // pushdown has been effective.
-    assert(metadata.get("PushedFilters").ne(None))
-    assert(metadata("PushedFilters").contains(dt.toString))
-    assert(metadata("PushedFilters").contains(ts.toString))
-
     val row = df.collect()(0)
     assert(row.getDate(0).equals(dateVal))
     assert(row.getTimestamp(1).equals(timestampVal))
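The deleted assertions relied on SparkPlan.metadata to expose the "PushedFilters" entry. If the test still needs to prove that the date/timestamp predicates were pushed down to Oracle, one possible approach (a sketch, not part of this change; assertFiltersPushed is a hypothetical helper name) is to look for the "PushedFilters" annotation in the plan's string form instead:

// Hypothetical helper, not part of this PR: check the textual plan rather
// than SparkPlan.metadata for evidence of JDBC predicate pushdown.
import org.apache.spark.sql.DataFrame

def assertFiltersPushed(df: DataFrame, literals: String*): Unit = {
  // QueryExecution.toString renders the analyzed, optimized, and physical
  // plans; the JDBC scan node is expected to carry a "PushedFilters: [...]"
  // annotation when pushdown took effect (assumption about the plan text).
  val planText = df.queryExecution.toString
  assert(planText.contains("PushedFilters"))
  literals.foreach(l => assert(planText.contains(l)))
}

// Usage mirroring the deleted assertions:
// assertFiltersPushed(df, dt.toString, ts.toString)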