 */
package org.apache.iceberg.spark;

+import static org.apache.iceberg.CatalogProperties.CATALOG_IMPL;
+import static org.apache.iceberg.CatalogUtil.ICEBERG_CATALOG_TYPE;
+import static org.apache.iceberg.CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP;
+import static org.apache.iceberg.CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE;
+import static org.apache.iceberg.CatalogUtil.ICEBERG_CATALOG_TYPE_REST;
import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;
import org.apache.iceberg.catalog.SupportsNamespaces;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hadoop.HadoopCatalog;
+import org.apache.iceberg.inmemory.InMemoryCatalog;
+import org.apache.iceberg.rest.RESTCatalog;
+import org.apache.iceberg.rest.RESTCatalogServer;
+import org.apache.iceberg.rest.RESTServerExtension;
import org.apache.iceberg.util.PropertyUtil;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.api.io.TempDir;

@ExtendWith(ParameterizedTestExtension.class)
public abstract class TestBaseWithCatalog extends TestBase {
  protected static File warehouse = null;

+  @RegisterExtension
+  private static final RESTServerExtension REST_SERVER_EXTENSION =
+      new RESTServerExtension(
+          Map.of(
+              RESTCatalogServer.REST_PORT,
+              RESTServerExtension.FREE_PORT,
+              // An in-memory SQLite database is by default private to the connection that
+              // created it. If more than one JDBC connection backed by in-memory SQLite is
+              // created behind a single JdbcCatalog, the connections could see different views
+              // of table status even within the same catalog. Reference:
+              // https://www.sqlite.org/inmemorydb.html
+              CatalogProperties.CLIENT_POOL_SIZE,
+              "1"));
+
+  protected static RESTCatalog restCatalog;
+
  @Parameters(name = "catalogName = {0}, implementation = {1}, config = {2}")
  protected static Object[][] parameters() {
    return new Object[][] {
@@ -59,13 +85,14 @@ protected static Object[][] parameters() {
  }

  @BeforeAll
-  public static void createWarehouse() throws IOException {
+  public static void setUpAll() throws IOException {
    TestBaseWithCatalog.warehouse = File.createTempFile("warehouse", null);
    assertThat(warehouse.delete()).isTrue();
+    restCatalog = REST_SERVER_EXTENSION.client();
  }

  @AfterAll
-  public static void dropWarehouse() throws IOException {
+  public static void tearDownAll() throws IOException {
    if (warehouse != null && warehouse.exists()) {
      Path warehousePath = new Path(warehouse.getAbsolutePath());
      FileSystem fs = warehousePath.getFileSystem(hiveConf);
@@ -89,13 +116,37 @@ public static void dropWarehouse() throws IOException {
  protected TableIdentifier tableIdent = TableIdentifier.of(Namespace.of("default"), "table");
  protected String tableName;

+  private void configureValidationCatalog() {
+    if (catalogConfig.containsKey(ICEBERG_CATALOG_TYPE)) {
+      switch (catalogConfig.get(ICEBERG_CATALOG_TYPE)) {
+        case ICEBERG_CATALOG_TYPE_HADOOP:
+          this.validationCatalog =
+              new HadoopCatalog(spark.sessionState().newHadoopConf(), "file:" + warehouse);
+          break;
+        case ICEBERG_CATALOG_TYPE_REST:
+          this.validationCatalog = restCatalog;
+          break;
+        case ICEBERG_CATALOG_TYPE_HIVE:
+          this.validationCatalog = catalog;
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown catalog type");
+      }
+    } else if (catalogConfig.containsKey(CATALOG_IMPL)) {
+      switch (catalogConfig.get(CATALOG_IMPL)) {
+        case "org.apache.iceberg.inmemory.InMemoryCatalog":
+          this.validationCatalog = new InMemoryCatalog();
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown catalog impl");
+      }
+    }
+    this.validationNamespaceCatalog = (SupportsNamespaces) validationCatalog;
+  }
+
  @BeforeEach
  public void before() {
-    this.validationCatalog =
-        catalogName.equals("testhadoop")
-            ? new HadoopCatalog(spark.sessionState().newHadoopConf(), "file:" + warehouse)
-            : catalog;
-    this.validationNamespaceCatalog = (SupportsNamespaces) validationCatalog;
+    configureValidationCatalog();

    spark.conf().set("spark.sql.catalog." + catalogName, implementation);
    catalogConfig.forEach(
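As a rough illustration of how the new configureValidationCatalog() dispatch gets exercised, a parameterized subclass could supply a catalog config whose ICEBERG_CATALOG_TYPE is "rest", so the validation catalog resolves to the shared restCatalog client. This is only a sketch under stated assumptions: the subclass name, the "testrest" catalog name, and the URI literal are hypothetical and not part of the change above; static imports are assumed to mirror TestBaseWithCatalog.

// Hypothetical sketch, not part of the diff above: a subclass whose config map
// routes validationCatalog to restCatalog via the ICEBERG_CATALOG_TYPE switch.
public class TestWithRestCatalogExample extends TestBaseWithCatalog {

  @Parameters(name = "catalogName = {0}, implementation = {1}, config = {2}")
  protected static Object[][] parameters() {
    return new Object[][] {
      {
        "testrest", // {0} name the catalog is registered under in Spark (illustrative)
        SparkCatalog.class.getName(), // {1} Spark catalog implementation
        Map.of( // {2} catalog config read by configureValidationCatalog()
            ICEBERG_CATALOG_TYPE, ICEBERG_CATALOG_TYPE_REST,
            // URI of the server started by REST_SERVER_EXTENSION; placeholder value here
            CatalogProperties.URI, "http://localhost:8181")
      }
    };
  }
}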