Skip to content

Commit

Permalink
Address comments
Browse files Browse the repository at this point in the history
  • Loading branch information
ajantha-bhat committed Jan 8, 2024
1 parent 50b23c9 commit e13e6d2
Show file tree
Hide file tree
Showing 9 changed files with 18 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -70,12 +70,12 @@ public void clearRewrite(Table table, String fileSetId) {

/**
 * Returns the rewrite set IDs registered for the given table.
 *
 * <p>Keys of {@code resultMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return resultMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,12 @@ public <T extends ScanTask> List<T> removeTasks(Table table, String setId) {

/**
 * Returns the scan-task set IDs registered for the given table.
 *
 * <p>Keys of {@code tasksMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return tasksMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -947,14 +947,14 @@ public static org.apache.spark.sql.catalyst.TableIdentifier toV1TableIdentifier(
return org.apache.spark.sql.catalyst.TableIdentifier.apply(table, database);
}

static String tableUUID(org.apache.iceberg.Table table) {
static String baseTableUUID(org.apache.iceberg.Table table) {
if (table instanceof HasTableOperations) {
TableOperations ops = ((HasTableOperations) table).operations();
return ops.current().uuid();
} else if (table instanceof BaseMetadataTable) {
return ((BaseMetadataTable) table).table().operations().current().uuid();
} else {
return null;
throw new UnsupportedOperationException("Cannot retrieve UUID for table " + table.name());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,12 +70,12 @@ public void clearRewrite(Table table, String fileSetId) {

/**
 * Returns the rewrite set IDs registered for the given table.
 *
 * <p>Keys of {@code resultMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return resultMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,12 @@ public <T extends ScanTask> List<T> removeTasks(Table table, String setId) {

/**
 * Returns the scan-task set IDs registered for the given table.
 *
 * <p>Keys of {@code tasksMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return tasksMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -950,14 +950,14 @@ public static org.apache.spark.sql.catalyst.TableIdentifier toV1TableIdentifier(
return org.apache.spark.sql.catalyst.TableIdentifier.apply(table, database);
}

static String tableUUID(org.apache.iceberg.Table table) {
static String baseTableUUID(org.apache.iceberg.Table table) {
if (table instanceof HasTableOperations) {
TableOperations ops = ((HasTableOperations) table).operations();
return ops.current().uuid();
} else if (table instanceof BaseMetadataTable) {
return ((BaseMetadataTable) table).table().operations().current().uuid();
} else {
return null;
throw new UnsupportedOperationException("Cannot retrieve UUID for table " + table.name());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,12 +70,12 @@ public void clearRewrite(Table table, String fileSetId) {

/**
 * Returns the rewrite set IDs registered for the given table.
 *
 * <p>Keys of {@code resultMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return resultMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,12 @@ public <T extends ScanTask> List<T> removeTasks(Table table, String setId) {

/**
 * Returns the scan-task set IDs registered for the given table.
 *
 * <p>Keys of {@code tasksMap} are (base table UUID, set ID) pairs (see {@code toId});
 * entries are matched on the table's base UUID and the set-ID half of each key is collected.
 */
public Set<String> fetchSetIds(Table table) {
  return tasksMap.keySet().stream()
      // match on the base table UUID so metadata tables resolve to the same key
      .filter(e -> e.first().equals(Spark3Util.baseTableUUID(table)))
      .map(Pair::second)
      .collect(Collectors.toSet());
}

/** Builds the composite (base table UUID, set ID) key used to index entries for a table. */
private Pair<String, String> toId(Table table, String setId) {
  return Pair.of(Spark3Util.baseTableUUID(table), setId);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -950,14 +950,14 @@ public static org.apache.spark.sql.catalyst.TableIdentifier toV1TableIdentifier(
return org.apache.spark.sql.catalyst.TableIdentifier.apply(table, database);
}

static String tableUUID(org.apache.iceberg.Table table) {
static String baseTableUUID(org.apache.iceberg.Table table) {
if (table instanceof HasTableOperations) {
TableOperations ops = ((HasTableOperations) table).operations();
return ops.current().uuid();
} else if (table instanceof BaseMetadataTable) {
return ((BaseMetadataTable) table).table().operations().current().uuid();
} else {
return null;
throw new UnsupportedOperationException("Cannot retrieve UUID for table " + table.name());
}
}

Expand Down

0 comments on commit e13e6d2

Please sign in to comment.