diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 0000000000..325daaa4b5
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,3 @@
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-java:latest
+ digest: sha256:204b7af96e6d481f19b0ff377aa379d46bc56dd06e1cc7c523f361dd9cbfeeaa
diff --git a/.github/readme/synth.py b/.github/.OwlBot.yaml
similarity index 73%
rename from .github/readme/synth.py
rename to .github/.OwlBot.yaml
index 7b48cc28d3..54aca963f7 100644
--- a/.github/readme/synth.py
+++ b/.github/.OwlBot.yaml
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,8 +12,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""This script is used to synthesize generated the README for this library."""
-
-from synthtool.languages import java
-
-java.custom_templates(["java_library/README.md"])
+docker:
+ image: "gcr.io/cloud-devrel-public-resources/owlbot-java:latest"
\ No newline at end of file
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
index 1a23ea42b1..2176b05432 100644
--- a/.github/blunderbuss.yml
+++ b/.github/blunderbuss.yml
@@ -1,5 +1,5 @@
# Configuration for the Blunderbuss GitHub app. For more info see
-# https://github.com/googleapis/repo-automation-bots/tree/master/packages/blunderbuss
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/blunderbuss
assign_prs_by:
- labels:
- samples
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 3ecab44349..307b69962f 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1,8 +1,10 @@
releaseType: java-yoshi
bumpMinorPreMajor: true
extraFiles:
- - bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/BigtableVersionInfo.java
- - bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableHBaseVersion.java
+ - >-
+ bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/BigtableVersionInfo.java
+ - >-
+ bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableHBaseVersion.java
handleGHRelease: true
branches:
- branch: bigtable-1.x
@@ -10,9 +12,20 @@ branches:
bumpMinorPreMajor: true
handleGHRelease: true
extraFiles:
- - bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/BigtableVersionInfo.java
- - bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableHBaseVersion.java
+ - >-
+ bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/BigtableVersionInfo.java
+ - >-
+ bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableHBaseVersion.java
- releaseType: java-lts
bumpMinorPreMajor: true
handleGHRelease: true
branch: 1.20.0-sp
+ - releaseType: java-lts
+ bumpMinorPreMajor: true
+ extraFiles:
+ - >-
+ bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/BigtableVersionInfo.java
+ - >-
+ bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableHBaseVersion.java
+ handleGHRelease: true
+ branch: 1.25.2-sp
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 4ed9335fe3..de47050a03 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -2,7 +2,7 @@ rebaseMergeAllowed: false
squashMergeAllowed: true
mergeCommitAllowed: false
branchProtectionRules:
- - pattern: master
+ - pattern: main
isAdminEnforced: true
requiredStatusCheckContexts:
- 'Kokoro - Test: Binary Compatibility'
@@ -13,6 +13,7 @@ branchProtectionRules:
- 'Kokoro - Test: Java 8'
- 'Kokoro - Test: Beam Integration'
- cla/google
+ - OwlBot Post Processor
requiredApprovingReviewCount: 1
requiresCodeOwnerReviews: true
- pattern: 1.20.0-sp
@@ -26,6 +27,7 @@ branchProtectionRules:
- 'Kokoro - Test: Java 8'
- 'Kokoro - Test: Beam Integration'
- cla/google
+ - OwlBot Post Processor
requiredApprovingReviewCount: 1
requiresCodeOwnerReviews: true
- pattern: bigtable-1.x
@@ -39,6 +41,21 @@ branchProtectionRules:
- 'Kokoro - Test: Java 8'
- 'Kokoro - Test: Beam Integration'
- cla/google
+ - OwlBot Post Processor
+ requiredApprovingReviewCount: 1
+ requiresCodeOwnerReviews: true
+ - pattern: 1.25.2-sp
+ isAdminEnforced: true
+ requiredStatusCheckContexts:
+ - 'Kokoro - Test: Binary Compatibility'
+ - 'Kokoro - Test: Code Format'
+ - 'Kokoro - Test: Dependencies'
+ - 'Kokoro - Test: Integration'
+ - 'Kokoro - Test: Java 11'
+ - 'Kokoro - Test: Java 8'
+ - 'Kokoro - Test: Beam Integration'
+ - cla/google
+ - OwlBot Post Processor
requiredApprovingReviewCount: 1
requiresCodeOwnerReviews: true
permissionRules:
diff --git a/.github/trusted-contribution.yml b/.github/trusted-contribution.yml
index f247d5c789..a0ba1f7d90 100644
--- a/.github/trusted-contribution.yml
+++ b/.github/trusted-contribution.yml
@@ -1,2 +1,3 @@
trustedContributors:
-- renovate-bot
\ No newline at end of file
+- renovate-bot
+- gcf-owl-bot[bot]
diff --git a/.kokoro/continuous/readme.cfg b/.kokoro/continuous/readme.cfg
deleted file mode 100644
index cd53028064..0000000000
--- a/.kokoro/continuous/readme.cfg
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/java-bigtable-hbase/.kokoro/readme.sh"
-}
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- regex: "**/*sponge_log.log"
- }
-}
-
-# The github token is stored here.
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- # TODO(theacodes): remove this after secrets have globally propagated
- backend_type: FASTCONFIGPUSH
- }
- }
-}
-
-# Common env vars for all repositories and builds.
-env_vars: {
- key: "GITHUB_USER"
- value: "yoshi-automation"
-}
-env_vars: {
- key: "GITHUB_EMAIL"
- value: "yoshi-automation@google.com"
-}
diff --git a/.kokoro/nightly/integration-beam.cfg b/.kokoro/nightly/integration-beam.cfg
index e32e1205b7..30f49aa020 100644
--- a/.kokoro/nightly/integration-beam.cfg
+++ b/.kokoro/nightly/integration-beam.cfg
@@ -8,7 +8,7 @@ env_vars: {
env_vars: {
key: "INTEGRATION_TEST_ARGS"
- value: "-PbeamIntegrationTest -Dgoogle.bigtable.project.id=gcloud-devel -Dgoogle.bigtable.instance.id=google-cloud-bigtable -Dgoogle.dataflow.work-dir=gs://java-bigtable-hbase-testing/work-dir -Dcloud.test.data.folder=gs://java-bigtable-hbase-testing/hbase-snapshot-import-integration-tests -Dregion=us-central1"
+ value: "-PbeamIntegrationTest,bigtableDataflowIntegrationTest -Dgoogle.bigtable.project.id=gcloud-devel -Dgoogle.bigtable.instance.id=google-cloud-bigtable -Dgoogle.dataflow.work-dir=gs://java-bigtable-hbase-testing/work-dir -Dcloud.test.data.folder=gs://java-bigtable-hbase-testing/hbase-snapshot-import-integration-tests -Dregion=us-central1"
}
env_vars: {
diff --git a/.kokoro/presubmit/graalvm-native.cfg b/.kokoro/presubmit/graalvm-native.cfg
new file mode 100644
index 0000000000..4c7225ec92
--- /dev/null
+++ b/.kokoro/presubmit/graalvm-native.cfg
@@ -0,0 +1,33 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/graalvm"
+}
+
+env_vars: {
+ key: "JOB_TYPE"
+ value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+ key: "GCLOUD_PROJECT"
+ value: "gcloud-devel"
+}
+
+env_vars: {
+ key: "GOOGLE_CLOUD_PROJECT"
+ value: "gcloud-devel"
+}
+
+env_vars: {
+ key: "GOOGLE_APPLICATION_CREDENTIALS"
+ value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "java-it-service-account"
+}
diff --git a/.kokoro/presubmit/integration-beam.cfg b/.kokoro/presubmit/integration-beam.cfg
index e32e1205b7..30f49aa020 100644
--- a/.kokoro/presubmit/integration-beam.cfg
+++ b/.kokoro/presubmit/integration-beam.cfg
@@ -8,7 +8,7 @@ env_vars: {
env_vars: {
key: "INTEGRATION_TEST_ARGS"
- value: "-PbeamIntegrationTest -Dgoogle.bigtable.project.id=gcloud-devel -Dgoogle.bigtable.instance.id=google-cloud-bigtable -Dgoogle.dataflow.work-dir=gs://java-bigtable-hbase-testing/work-dir -Dcloud.test.data.folder=gs://java-bigtable-hbase-testing/hbase-snapshot-import-integration-tests -Dregion=us-central1"
+ value: "-PbeamIntegrationTest,bigtableDataflowIntegrationTest -Dgoogle.bigtable.project.id=gcloud-devel -Dgoogle.bigtable.instance.id=google-cloud-bigtable -Dgoogle.dataflow.work-dir=gs://java-bigtable-hbase-testing/work-dir -Dcloud.test.data.folder=gs://java-bigtable-hbase-testing/hbase-snapshot-import-integration-tests -Dregion=us-central1"
}
env_vars: {
diff --git a/.kokoro/release/common.sh b/.kokoro/release/common.sh
index 6e3f65999b..7f78ee414f 100755
--- a/.kokoro/release/common.sh
+++ b/.kokoro/release/common.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google Inc.
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/release/drop.sh b/.kokoro/release/drop.sh
index 5c4551efa2..742ec1a886 100755
--- a/.kokoro/release/drop.sh
+++ b/.kokoro/release/drop.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google Inc.
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/release/promote.sh b/.kokoro/release/promote.sh
index 1fa95fa537..3cac3d8a97 100755
--- a/.kokoro/release/promote.sh
+++ b/.kokoro/release/promote.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google Inc.
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/release/publish_javadoc.sh b/.kokoro/release/publish_javadoc.sh
index e890345185..8f255297cb 100755
--- a/.kokoro/release/publish_javadoc.sh
+++ b/.kokoro/release/publish_javadoc.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2019 Google Inc.
+# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/release/publish_javadoc11.sh b/.kokoro/release/publish_javadoc11.sh
index 74878eb19c..d63c21ec0e 100755
--- a/.kokoro/release/publish_javadoc11.sh
+++ b/.kokoro/release/publish_javadoc11.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2021 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,13 +36,9 @@ mvn clean install -B -q -DskipTests=true
export NAME=bigtable-client-parent
export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3)
-# V3 generates docfx yml from javadoc
-# generate yml
-mvn clean site -B -q -P docFX
-
-# copy README to docfx-yml dir and rename index.md
-cp README.md target/docfx-yml/index.md
-# copy CHANGELOG to docfx-yml dir and rename history.md
+# cloud RAD generation
+mvn clean javadoc:aggregate -B -q -P docFX
+# include CHANGELOG
cp CHANGELOG.md target/docfx-yml/history.md
pushd target/docfx-yml
diff --git a/.kokoro/release/stage.sh b/.kokoro/release/stage.sh
index 8a1033843c..77dc4e8f0f 100755
--- a/.kokoro/release/stage.sh
+++ b/.kokoro/release/stage.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google Inc.
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index 9da0f83987..8b69b793c9 100644
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google Inc.
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 123e841eb5..1a8de23ead 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -10,5 +10,6 @@
"repo_short": "java-bigtable-hbase",
"distribution_name": "com.google.cloud.bigtable:bigtable-client-parent",
"api_id": "bigtable.googleapis.com",
+ "library_type": "OTHER",
"codeowner_team": "@googleapis/api-bigtable"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c7d9583037..b0a91f0a73 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,126 @@
# Changelog
+## [2.0.0-beta6](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-beta5...v2.0.0-beta6) (2022-01-06)
+
+**Note: This beta release is a work-in-progress. For the latest stable release of java-bigtable-hbase, please refer to version [1.26.2](https://github.com/googleapis/java-bigtable-hbase/releases/tag/v1.26.2).**
+
+### Miscellaneous Chores
+
+* bump tag to 2.0.0-beta6 ([#3407](https://www.github.com/googleapis/java-bigtable-hbase/issues/3407)) ([5d2ab98](https://www.github.com/googleapis/java-bigtable-hbase/commit/5d2ab98309b62ee2268dbe106002e62a7829c61c))
+
+
+### Dependencies
+
+* log4j 2.17.0 ([#3404](https://www.github.com/googleapis/java-bigtable-hbase/issues/3404)) ([08a5ebc](https://www.github.com/googleapis/java-bigtable-hbase/commit/08a5ebc4c9f15fb98d74171ca7450d43b4f2f3f1))
+* update beam.version to v2.35.0 ([#3420](https://www.github.com/googleapis/java-bigtable-hbase/issues/3420)) ([be2c629](https://www.github.com/googleapis/java-bigtable-hbase/commit/be2c629c615a4fd69cc763c9920273b1c145d74b))
+* update dependency net.bytebuddy:byte-buddy to v1.12.6 ([#3348](https://www.github.com/googleapis/java-bigtable-hbase/issues/3348)) ([6835f7b](https://www.github.com/googleapis/java-bigtable-hbase/commit/6835f7bffd6c5776c1b4310e4bc372fddd5b9864))
+* update jmh.version to v1.34 ([#3415](https://www.github.com/googleapis/java-bigtable-hbase/issues/3415)) ([16077d2](https://www.github.com/googleapis/java-bigtable-hbase/commit/16077d2fa53599bf9de1a7a9e90200d0355fb910))
+* use logback for 1.x tools module ([#3412](https://www.github.com/googleapis/java-bigtable-hbase/issues/3412)) ([cde3f14](https://www.github.com/googleapis/java-bigtable-hbase/commit/cde3f149f4e04042f02e3f783c199adc517194a4))
+
+## [2.0.0-beta5](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-beta4...v2.0.0-beta5) (2021-12-15)
+
+**Note: This beta release is a work-in-progress. For the latest stable release of java-bigtable-hbase, please refer to version [1.26.1](https://github.com/googleapis/java-bigtable-hbase/releases/tag/v1.26.1).**
+
+### Dependencies
+
+* remove explicit log4j2 deps on beam artifacts as they are no longer brought in transitively ([#3394](https://www.github.com/googleapis/java-bigtable-hbase/issues/3394)) ([2dafd59](https://www.github.com/googleapis/java-bigtable-hbase/commit/2dafd599754d1c10bf03f7986da2de34fae4adf1))
+
+## [2.0.0-beta4](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-beta3...v2.0.0-beta4) (2021-12-14)
+
+
+### Miscellaneous Chores
+
+* make next tag 2.0.0-beta4 ([#3387](https://www.github.com/googleapis/java-bigtable-hbase/issues/3387)) ([01010e9](https://www.github.com/googleapis/java-bigtable-hbase/commit/01010e9975c7c13130980e8837e8dbce2b1cd705))
+
+
+### Dependencies
+
+* **fix:** bump log4j2 to 2.16.0 and ban all 2.x.x versions which are < 2.16.0 ([#3388](https://www.github.com/googleapis/java-bigtable-hbase/issues/3388)) ([265f042](https://www.github.com/googleapis/java-bigtable-hbase/commit/265f042191bc79609e19c53a9624e8e25c589608))
+
+## [2.0.0-beta3](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-beta2...v2.0.0-beta3) (2021-12-10)
+
+**Note: This beta release is a work-in-progress. For the latest stable release of java-bigtable-hbase, please refer to version [1.26.0](https://github.com/googleapis/java-bigtable-hbase/releases/tag/v1.26.0).**
+
+### Bug Fixes
+
+* dynamically load BigtableAdmin and BigtableAsyncAdmin ([#3341](https://www.github.com/googleapis/java-bigtable-hbase/issues/3341)) ([18b2e18](https://www.github.com/googleapis/java-bigtable-hbase/commit/18b2e18b71d0ce8ab437f9d492d8c30b498727ba))
+* fix flow controller setting and option javadocs ([#3338](https://www.github.com/googleapis/java-bigtable-hbase/issues/3338)) ([2d62e34](https://www.github.com/googleapis/java-bigtable-hbase/commit/2d62e348962246a9ab42d46039cd067418f384a8))
+
+
+### Documentation
+
+* **fix:** Fix paths in HBase tools readme ([#3345](https://www.github.com/googleapis/java-bigtable-hbase/issues/3345)) ([09ba0f3](https://www.github.com/googleapis/java-bigtable-hbase/commit/09ba0f3d19543a68dede6f72bc728d87daa4ffb5))
+
+
+### Dependencies
+
+* beam 2.33.0 ([#3314](https://www.github.com/googleapis/java-bigtable-hbase/issues/3314)) ([08b4da4](https://www.github.com/googleapis/java-bigtable-hbase/commit/08b4da406f7cac110cfe8a675f0f0804c03a9684))
+* migrate to log4j-core ([#3326](https://www.github.com/googleapis/java-bigtable-hbase/issues/3326)) ([001df8e](https://www.github.com/googleapis/java-bigtable-hbase/commit/001df8e9504189001b158f92b9d882e30cc56176))
+* update beam.version to v2.34.0 ([#3333](https://www.github.com/googleapis/java-bigtable-hbase/issues/3333)) ([3bfc672](https://www.github.com/googleapis/java-bigtable-hbase/commit/3bfc672adc188cd0b25aca038b304b820db6e486))
+* update dependency com.google.cloud:google-cloud-bigtable-emulator to v0.139.0 ([#3302](https://www.github.com/googleapis/java-bigtable-hbase/issues/3302)) ([322d4f9](https://www.github.com/googleapis/java-bigtable-hbase/commit/322d4f94a1251e50b1cbed333e7d6f0d6d9e18b1))
+* update dependency com.google.cloud:google-cloud-bigtable-emulator to v0.140.0 ([#3350](https://www.github.com/googleapis/java-bigtable-hbase/issues/3350)) ([386f1c5](https://www.github.com/googleapis/java-bigtable-hbase/commit/386f1c50ce44688e05dd58de0ae68bdfdc83af45))
+* update dependency com.google.cloud:google-cloud-bigtable-emulator to v0.140.1 ([#3355](https://www.github.com/googleapis/java-bigtable-hbase/issues/3355)) ([1ecd396](https://www.github.com/googleapis/java-bigtable-hbase/commit/1ecd396c20b0a095a21d863cfc50f84842d1b116))
+* update dependency com.google.cloud:google-cloud-bigtable-emulator to v0.141.0 ([#3368](https://www.github.com/googleapis/java-bigtable-hbase/issues/3368)) ([dfe3f17](https://www.github.com/googleapis/java-bigtable-hbase/commit/dfe3f171a08767395bb0518eaac15f55bd5eb9d7))
+* update dependency com.google.errorprone:error_prone_annotations to v2.10.0 ([#3330](https://www.github.com/googleapis/java-bigtable-hbase/issues/3330)) ([387073f](https://www.github.com/googleapis/java-bigtable-hbase/commit/387073f02b4555bdec0743bd5638831493aad0bc))
+* update dependency com.google.guava:guava ([#3298](https://www.github.com/googleapis/java-bigtable-hbase/issues/3298)) ([6b0f56e](https://www.github.com/googleapis/java-bigtable-hbase/commit/6b0f56eb47e01c9c0c11dbc5e3f69bd119e30a6a))
+* update log4j2.version to v2.15.0 ([#3373](https://www.github.com/googleapis/java-bigtable-hbase/issues/3373)) ([8eef232](https://www.github.com/googleapis/java-bigtable-hbase/commit/8eef232fba19969368015b066bd88cbdc55f0f2b))
+* upgrade bigtable.version to 2.2.0 ([#3304](https://www.github.com/googleapis/java-bigtable-hbase/issues/3304)) ([1ecdbe9](https://www.github.com/googleapis/java-bigtable-hbase/commit/1ecdbe9e2557df04ad0085de443c7ae1a7b57db5))
+
+
+### Miscellaneous Chores
+
+* make next tag 2.0.0-beta3 ([#3374](https://www.github.com/googleapis/java-bigtable-hbase/issues/3374)) ([6a51dd1](https://www.github.com/googleapis/java-bigtable-hbase/commit/6a51dd1fcdb9699b51ceb7f92aa994199123eacd))
+
+## [2.0.0-beta2](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-beta1...v2.0.0-beta2) (2021-10-19)
+
+**Note: This beta release is a work-in-progress. For the latest stable release of java-bigtable-hbase, please refer to version [1.25.0](https://github.com/googleapis/java-bigtable-hbase/releases/tag/v1.25.0).**
+
+### Bug Fixes
+
+* fix user agent format ([#3288](https://www.github.com/googleapis/java-bigtable-hbase/issues/3288)) ([49ad39f](https://www.github.com/googleapis/java-bigtable-hbase/commit/49ad39f6ef5e5bd2c294202685816c9f2b61e74b))
+
+
+### Miscellaneous Chores
+
+* bump next tag to 2.0.0-beta2 ([#3283](https://www.github.com/googleapis/java-bigtable-hbase/issues/3283)) ([8753c30](https://www.github.com/googleapis/java-bigtable-hbase/commit/8753c309004e50b208f72e01bdc9318349297254))
+
+
+### Dependencies
+
+* upgrade beam to 2.30.0 ([#3284](https://www.github.com/googleapis/java-bigtable-hbase/issues/3284)) ([05e80e2](https://www.github.com/googleapis/java-bigtable-hbase/commit/05e80e20f0a9013a06c2a65d2a5cfc9221b5d5c1))
+* upgrade hbase1 to 1.7.1 ([#3270](https://www.github.com/googleapis/java-bigtable-hbase/issues/3270)) ([2c5bfe9](https://www.github.com/googleapis/java-bigtable-hbase/commit/2c5bfe934718371eed0069d6c8d3b08b5356a7d2))
+
+## [2.0.0-beta1](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-alpha2...v2.0.0-beta1) (2021-10-07)
+
+**Note: This beta release is a work-in-progress. For the latest stable release of java-bigtable-hbase, please refer to version [1.25.0](https://github.com/googleapis/java-bigtable-hbase/releases/tag/v1.25.0).**
+
+This is the first beta release of Bigtable HBase 2.0.0. The main focus of Bigtable Hbase 2.0.0 is to migrate the internals from bigtable-client-core to [java-bigtable](https://github.com/googleapis/java-bigtable).
+The primary difference from alpha is that bigtable-client-core has been completely removed and CloudBigtableIO has been migrated as well.
+This release will remove the configuration available in alpha to switch between the core Bigtable layer (bigtable-client-core) and the [java-bigtable](https://github.com/googleapis/java-bigtable) client library, and will only use the java-bigtable client to connect to the Bigtable API.
+
+We look forward to hearing your feedback! Please let us know any comments or issues in our issue tracker.
+
+Complete release notes below:
+
+### Bug Fixes
+
+* adding validation for snapshot name for hbase import pipeline ([#3203](https://www.github.com/googleapis/java-bigtable-hbase/issues/3203)) ([fa9991a](https://www.github.com/googleapis/java-bigtable-hbase/commit/fa9991a2703c0faf4a1ba5737f5844619a497c17))
+* Clean up RowResultAdapter ([#3267](https://www.github.com/googleapis/java-bigtable-hbase/issues/3267)) ([1ccf063](https://www.github.com/googleapis/java-bigtable-hbase/commit/1ccf0634c73c5ccef1efc612009ed66e11798021))
+
+
+### Dependencies
+
+* update bigtable.version to v2.1.4 ([#3246](https://www.github.com/googleapis/java-bigtable-hbase/issues/3246)) ([8636efb](https://www.github.com/googleapis/java-bigtable-hbase/commit/8636efb8ba321d911f812a20e347f89a43365ec8))
+* update dependency com.google.cloud:google-cloud-bigtable-emulator to v0.138.4 ([#3247](https://www.github.com/googleapis/java-bigtable-hbase/issues/3247)) ([7a3057f](https://www.github.com/googleapis/java-bigtable-hbase/commit/7a3057fbdec07758d8c24d544b6ae371f4afa288))
+* update dependency org.codehaus.plexus:plexus-utils to v3.4.1 ([#3249](https://www.github.com/googleapis/java-bigtable-hbase/issues/3249)) ([dfb729f](https://www.github.com/googleapis/java-bigtable-hbase/commit/dfb729f5a4ab71f8789f304942e9154d6f546273))
+* update jmh.version to v1.33 ([#3254](https://www.github.com/googleapis/java-bigtable-hbase/issues/3254)) ([ce8110e](https://www.github.com/googleapis/java-bigtable-hbase/commit/ce8110e7639c7524f962282a6d397a33253bca62))
+
+
+### Miscellaneous Chores
+
+* bump next tag to 2.0.0-beta1 ([#3277](https://www.github.com/googleapis/java-bigtable-hbase/issues/3277)) ([499d48b](https://www.github.com/googleapis/java-bigtable-hbase/commit/499d48bbad69c1639cfc523bfb4d9226dd4c4a65))
+* make next tag 2.0.0-alpha3 ([#3207](https://www.github.com/googleapis/java-bigtable-hbase/issues/3207)) ([a6241e1](https://www.github.com/googleapis/java-bigtable-hbase/commit/a6241e1c800592e560d6bdd2bfa832e783bc1ed2))
+
## [2.0.0-alpha2](https://www.github.com/googleapis/java-bigtable-hbase/compare/v2.0.0-alpha1...v2.0.0-alpha2) (2021-08-19)
diff --git a/README.md b/README.md
index 926470da8b..36934ab97f 100644
--- a/README.md
+++ b/README.md
@@ -46,20 +46,23 @@ which makes it easy for development teams to get started.
com.google.cloud.bigtablebigtable-hbase-1.x
- 2.0.0-alpha2
+ 2.0.0-beta6
```
Gradle:
```Groovy
- compile 'com.google.cloud.bigtable:bigtable-hbase-1.x:2.0.0-alpha2'
+ compile 'com.google.cloud.bigtable:bigtable-hbase-1.x:2.0.0-beta6'
```
SBT:
```Scala
- libraryDependencies += "com.google.cloud.bigtable" % "bigtable-hbase-1.x" % "2.0.0-alpha2"
+ libraryDependencies += "com.google.cloud.bigtable" % "bigtable-hbase-1.x" % "2.0.0-beta6"
```
[//]: # ({x-version-update-end})
+
+* Refer to the [Connecting to Bigtable](https://cloud.google.com/bigtable/docs/hbase-connecting) documentation for detailed demonstrations of how to configure the properties to connect to Cloud Bigtable.
+
* Refer to the [Java samples documentation](https://cloud.google.com/bigtable/docs/samples) for detailed demonstrations of how to read and write data with Cloud Bigtable. The code for these samples is available in the [Cloud Bigtable examples project](https://github.com/GoogleCloudPlatform/cloud-bigtable-examples).
## OpenCensus Integration
@@ -81,7 +84,7 @@ If you are _not_ using the shaded Bigtable HBase Client artifact, you need to de
com.google.cloud.bigtablebigtable-hbase-1.x
- 2.0.0-alpha2
+ 2.0.0-beta6
```
[//]: # ({x-version-update-end})
@@ -196,7 +199,7 @@ If you are _not_ using the shaded Bigtable HBase Client artifact, you need to de
com.google.cloud.bigtablebigtable-hbase-1.x
- 2.0.0-alpha2
+ 2.0.0-beta6
```
[//]: # ({x-version-update-end})
@@ -316,7 +319,7 @@ Java is a registered trademark of Oracle and/or its affiliates.
[maven-examples-repo]: https://github.com/GoogleCloudPlatform/cloud-bigtable-examples
[google-cloud-bigtable-discuss]: https://groups.google.com/group/google-cloud-bigtable-discuss
[google-cloud-bigtable-announce]: https://groups.google.com/group/google-cloud-bigtable-announce
-[google-cloud-bigtable-emulator]: https://github.com/googleapis/google-cloud-java/tree/master/google-cloud-testing/google-cloud-bigtable-emulator
+[google-cloud-bigtable-emulator]: https://github.com/googleapis/google-cloud-java/tree/main/google-cloud-testing/google-cloud-bigtable-emulator
[google-cloud-bigtable]: https://github.com/googleapis/java-bigtable
[kokoro-badge-image-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigtable-hbase/java8.svg
[kokoro-badge-link-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigtable-hbase/java8.html
@@ -326,6 +329,6 @@ Java is a registered trademark of Oracle and/or its affiliates.
[kokoro-badge-link-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigtable-hbase/java11.html
[kokoro-badge-image-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigtable-hbase/integration.svg
[kokoro-badge-link-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigtable-hbase/integration.html
-[contributing]: https://github.com/googleapis/java-bigtable-hbase/blob/master/CONTRIBUTING.md
-[code-of-conduct]: https://github.com/googleapis/java-bigtable-hbase/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
-[license]: https://github.com/googleapis/java-bigtable-hbase/blob/master/LICENSE
+[contributing]: https://github.com/googleapis/java-bigtable-hbase/blob/main/CONTRIBUTING.md
+[code-of-conduct]: https://github.com/googleapis/java-bigtable-hbase/blob/main/CODE_OF_CONDUCT.md#contributor-code-of-conduct
+[license]: https://github.com/googleapis/java-bigtable-hbase/blob/main/LICENSE
diff --git a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/pom.xml b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/pom.xml
index f74d3ba075..c6c7857a69 100644
--- a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/pom.xml
+++ b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/pom.xml
@@ -19,7 +19,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-client-core-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-integration-tests-common
@@ -87,7 +87,7 @@ limitations under the License.
org.apache.maven.pluginsmaven-deploy-plugin
- 3.0.0-M1
+ 3.0.0-M2true
diff --git a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/test_env/BigtableEnv.java b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/test_env/BigtableEnv.java
index 93b2442fe2..ffe9d89011 100644
--- a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/test_env/BigtableEnv.java
+++ b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/test_env/BigtableEnv.java
@@ -52,8 +52,7 @@ class BigtableEnv extends SharedTestEnv {
"google.bigtable.instance.id",
"google.bigtable.use.bulk.api",
"google.bigtable.use.plaintext.negotiation",
- "google.bigtable.snapshot.cluster.id",
- "google.bigtable.use.gcj.client");
+ "google.bigtable.snapshot.cluster.id");
@Override
protected void setup() throws IOException {
diff --git a/bigtable-client-core-parent/bigtable-hbase/pom.xml b/bigtable-client-core-parent/bigtable-hbase/pom.xml
index f64b553236..3437c2fd7d 100644
--- a/bigtable-client-core-parent/bigtable-hbase/pom.xml
+++ b/bigtable-client-core-parent/bigtable-hbase/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-client-core-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase
@@ -52,12 +52,6 @@ limitations under the License.
-
- ${project.groupId}
- bigtable-client-core
- ${bigtable-client-core.version}
-
-
${project.groupId}bigtable-metrics-api
@@ -178,7 +172,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-internal-test-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test
@@ -207,7 +201,7 @@ limitations under the License.
com.google.truthtruth
- 1.1.2
+ 1.1.3test
@@ -250,7 +244,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
+ 2.0.0-beta6bigtable-client-core-parent
diff --git a/bigtable-dataflow-parent/bigtable-beam-import/clirr-ignored-differences.xml b/bigtable-dataflow-parent/bigtable-beam-import/clirr-ignored-differences.xml
deleted file mode 100644
index b30e833c52..0000000000
--- a/bigtable-dataflow-parent/bigtable-beam-import/clirr-ignored-differences.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
- 7002
- com/google/cloud/bigtable/beam/TemplateUtils
- **
-
-
\ No newline at end of file
diff --git a/bigtable-dataflow-parent/bigtable-beam-import/pom.xml b/bigtable-dataflow-parent/bigtable-beam-import/pom.xml
index 54a008e597..539cf94180 100644
--- a/bigtable-dataflow-parent/bigtable-beam-import/pom.xml
+++ b/bigtable-dataflow-parent/bigtable-beam-import/pom.xml
@@ -18,7 +18,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-dataflow-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -54,11 +54,23 @@ limitations under the License.
org.apache.beambeam-sdks-java-core
+
+ com.google.cloud.bigdataoss
+ gcs-connector
+ hadoop2-2.2.2
+ shaded
+ org.apache.beam
beam-sdks-java-extensions-google-cloud-platform-core
+
+
+ org.hamcrest
+ hamcrest
+
+ org.apache.beam
@@ -73,6 +85,14 @@ limitations under the License.
com.google.api.grpcgrpc-google-cloud-bigtable-v2
+
+ org.hamcrest
+ hamcrest
+
+
+ org.hamcrest
+ hamcrest
+
@@ -80,10 +100,22 @@ limitations under the License.
beam-sdks-java-io-hadoop-commonruntime
+
+
+ org.hamcrest
+ hamcrest
+
+ org.apache.beambeam-sdks-java-io-hadoop-format
+
+
+ org.hamcrest
+ hamcrest
+
+
@@ -91,6 +123,12 @@ limitations under the License.
org.apache.beambeam-runners-core-construction-javaruntime
+
+
+ org.hamcrest
+ hamcrest
+
+ org.apache.beam
@@ -103,7 +141,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-beam
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6org.apache.hbase
@@ -115,7 +153,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-1.x-shaded
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -153,25 +191,13 @@ limitations under the License.
hbase-shaded-server${hbase1.version}
-
- com.google.cloud.bigdataoss
- gcs-connector
- hadoop2-2.1.4
- shaded
- com.google.apisgoogle-api-services-storage
- v1-rev171-1.25.0
-
-
- com.google.auto.value
- auto-value
- 1.8.2
- provided
+ v1-rev20210127-1.31.0com.google.code.findbugs
@@ -181,7 +207,7 @@ limitations under the License.
com.google.guavaguava
- 30.1-jre
+ 31.0-jrecommons-logging
@@ -215,7 +241,6 @@ limitations under the License.
runtime
-
org.apache.beam
@@ -242,7 +267,7 @@ limitations under the License.
com.google.truthtruth
- 1.1.2
+ 1.1.3test
@@ -400,7 +425,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6verify-mirror-deps
diff --git a/bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilder.java b/bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilder.java
index 63ebddb20a..428f987f14 100644
--- a/bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilder.java
+++ b/bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilder.java
@@ -95,13 +95,16 @@ public String getRestoreDir() {
}
public Configuration build() throws Exception {
- Preconditions.checkNotNull(projectId, "Required value projectId must be set");
- Preconditions.checkNotNull(
- hbaseSnapshotSourceDir, "Required value hbaseSnapshotSourceDir must be set");
- Preconditions.checkNotNull(snapshotName, "Required value snapshotName must be set");
+ Preconditions.checkState(
+ projectId != null && !projectId.isEmpty(), "Required value projectId must be set");
+ Preconditions.checkState(
+ hbaseSnapshotSourceDir != null && !hbaseSnapshotSourceDir.isEmpty(),
+ "Required value hbaseSnapshotSourceDir must be set");
+ Preconditions.checkState(
+ snapshotName != null && !snapshotName.isEmpty(), "Required value snapshotName must be set");
Preconditions.checkState(
hbaseSnapshotSourceDir.startsWith(GcsPath.SCHEME),
- "snapshot folder must be hosted in a GCS bucket ");
+ "Snapshot folder must be hosted in a GCS bucket");
Configuration conf = createHBaseConfiguration();
diff --git a/bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilderTest.java b/bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilderTest.java
index 579a57c238..fb5346f72a 100644
--- a/bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilderTest.java
+++ b/bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfigBuilderTest.java
@@ -16,6 +16,7 @@
package com.google.cloud.bigtable.beam.hbasesnapshots;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat;
@@ -45,4 +46,87 @@ public void testBuildingHBaseSnapshotInputConfigBuilder() {
conf.getClass(
"mapreduce.job.inputformat.class", TableSnapshotInputFormat.class, InputFormat.class));
}
+
+ @Test
+ public void testInvalidProjectHBaseSnapshotInputConfig() {
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setSnapshotName(TEST_SNAPSHOT_NAME)
+ .setHbaseSnapshotSourceDir(TEST_SNAPSHOT_DIR)
+ .build();
+ fail("Expected unset project to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value projectId must be set");
+ }
+
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId("")
+ .setSnapshotName(TEST_SNAPSHOT_NAME)
+ .setHbaseSnapshotSourceDir(TEST_SNAPSHOT_DIR)
+ .build();
+ fail("Expected empty project to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value projectId must be set");
+ }
+ }
+
+ @Test
+ public void testInvalidSnapshotHBaseSnapshotInputConfig() {
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId(TEST_PROJECT)
+ .setHbaseSnapshotSourceDir(TEST_SNAPSHOT_DIR)
+ .build();
+ fail("Expected unset snapshot name to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value snapshotName must be set");
+ }
+
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId(TEST_PROJECT)
+ .setSnapshotName("")
+ .setHbaseSnapshotSourceDir(TEST_SNAPSHOT_DIR)
+ .build();
+ fail("Expected empty snapshot name to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value snapshotName must be set");
+ }
+ }
+
+ @Test
+ public void testInvalidSourceDirHBaseSnapshotInputConfig() {
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId(TEST_PROJECT)
+ .setSnapshotName(TEST_SNAPSHOT_NAME)
+ .build();
+ fail("Expected unset snapshot directory to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value hbaseSnapshotSourceDir must be set");
+ }
+
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId(TEST_PROJECT)
+ .setSnapshotName(TEST_SNAPSHOT_NAME)
+ .setHbaseSnapshotSourceDir("")
+ .build();
+ fail("Expected empty snapshot directory to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Required value hbaseSnapshotSourceDir must be set");
+ }
+
+ try {
+ new HBaseSnapshotInputConfigBuilder()
+ .setProjectId(TEST_PROJECT)
+ .setSnapshotName(TEST_SNAPSHOT_NAME)
+ .setHbaseSnapshotSourceDir("test-bucket/hbase-export")
+ .build();
+ fail("Expected snapshot directory without gs prefix to fail");
+ } catch (Exception e) {
+ assertEquals(e.getMessage(), "Snapshot folder must be hosted in a GCS bucket");
+ }
+ }
}
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/pom.xml b/bigtable-dataflow-parent/bigtable-hbase-beam/pom.xml
index 3a6e2f6b1b..446ec94825 100644
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/pom.xml
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/pom.xml
@@ -16,7 +16,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-dataflow-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-beam
@@ -33,7 +33,7 @@ limitations under the License.
com.google.errorproneerror_prone_annotations
- 2.3.4
+ 2.10.0
@@ -44,6 +44,12 @@ limitations under the License.
org.apache.beambeam-sdks-java-core${beam.version}
+
+
+ org.hamcrest
+ hamcrest
+
+
@@ -61,6 +67,10 @@ limitations under the License.
org.slf4jslf4j-log4j12
+
+ org.hamcrest
+ hamcrest
+
@@ -68,7 +78,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-1.x-shaded
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
+ 2.0.0-beta6verify-mirror-deps
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableConfiguration.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableConfiguration.java
index e3d9d107d0..4ba1f5f733 100644
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableConfiguration.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableConfiguration.java
@@ -15,12 +15,11 @@
*/
package com.google.cloud.bigtable.beam;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableOptions;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableVersionInfo;
+import com.google.bigtable.repackaged.com.google.cloud.bigtable.Version;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.bigtable.repackaged.com.google.common.collect.ImmutableMap;
+import com.google.cloud.bigtable.hbase.BigtableConfiguration;
import com.google.cloud.bigtable.hbase.BigtableOptionsFactory;
-import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
@@ -110,9 +109,8 @@ Builder withAppProfileId(ValueProvider appProfileId) {
}
/**
- * Adds additional connection configuration. {@link
- * BigtableOptionsFactory#fromConfiguration(Configuration)} for more information about
- * configuration options.
+ * Adds additional connection configuration. {@link BigtableOptionsFactory} for more information
+ * about configuration options.
*
* @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
*/
@@ -121,9 +119,8 @@ public Builder withConfiguration(String key, String value) {
}
/**
- * Adds additional connection configuration. {@link
- * BigtableOptionsFactory#fromConfiguration(Configuration)} for more information about
- * configuration options.
+ * Adds additional connection configuration. {@link BigtableOptionsFactory} for more information
+ * about configuration options.
*
* @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
*/
@@ -157,8 +154,7 @@ public CloudBigtableConfiguration build() {
* @param projectId The project ID for the instance.
* @param instanceId The instance ID.
* @param additionalConfiguration A {@link Map} with additional connection configuration. See
- * {@link BigtableOptionsFactory#fromConfiguration(Configuration)} for more information about
- * configuration options.
+ * {@link BigtableOptionsFactory} for more information about configuration options.
*/
protected CloudBigtableConfiguration(
ValueProvider projectId,
@@ -198,15 +194,6 @@ public String getAppProfileId() {
return configuration.get(BigtableOptionsFactory.APP_PROFILE_ID_KEY).get();
}
- /**
- * Converts the {@link CloudBigtableConfiguration} to a {@link BigtableOptions} object.
- *
- * @return The {@link BigtableOptions} object.
- */
- public BigtableOptions toBigtableOptions() throws IOException {
- return BigtableOptionsFactory.fromConfiguration(toHBaseConfig());
- }
-
/**
* Converts the {@link CloudBigtableConfiguration} to an HBase {@link Configuration}.
*
@@ -215,6 +202,8 @@ public BigtableOptions toBigtableOptions() throws IOException {
public Configuration toHBaseConfig() {
Configuration config = new Configuration(false);
+ config = BigtableConfiguration.configure(config, this.getProjectId(), this.getInstanceId());
+
/**
* Sets below setting for batch BIGTABLE_USE_CACHED_DATA_CHANNEL_POOL = true
* BigtableOptionsFactory.BIGTABLE_HOST_KEY = batch-bigtable.googleapis.com
@@ -322,7 +311,7 @@ public void populateDisplayData(DisplayData.Builder builder) {
getDisplayValue(configuration.get(BigtableOptionsFactory.INSTANCE_ID_KEY)))
.withLabel("Instance ID"));
builder.add(
- DisplayData.item("bigtableClientVersion", BigtableVersionInfo.CLIENT_VERSION)
+ DisplayData.item("bigtableClientVersion", Version.VERSION)
.withLabel("Bigtable Client Version"));
Map> hashMap =
new HashMap>(configuration);
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableIO.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableIO.java
index 90bf83b8ba..3b46d75753 100755
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableIO.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableIO.java
@@ -17,20 +17,10 @@
import com.google.bigtable.repackaged.com.google.api.core.InternalApi;
import com.google.bigtable.repackaged.com.google.api.core.InternalExtensionOnly;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BulkOptions;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.models.KeyOffset;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableInstanceName;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableSession;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableSessionSharedThreadPools;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.async.ResourceLimiterStats;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.scanner.FlatRow;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.scanner.ResultScanner;
import com.google.bigtable.repackaged.com.google.common.annotations.VisibleForTesting;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.cloud.bigtable.batch.common.CloudBigtableServiceImpl;
-import com.google.cloud.bigtable.hbase.BigtableOptionsFactory;
-import com.google.cloud.bigtable.hbase.adapters.read.FlatRowAdapter;
-import com.google.cloud.bigtable.hbase.util.ByteStringer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -38,7 +28,6 @@
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
@@ -66,11 +55,13 @@
import org.apache.hadoop.hbase.client.BufferedMutator.ExceptionListener;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
@@ -130,8 +121,6 @@
@Experimental
public class CloudBigtableIO {
- private static final FlatRowAdapter FLAT_ROW_ADAPTER = new FlatRowAdapter();
-
/**
* A {@link BoundedSource} for a Cloud Bigtable {@link Table}, which is potentially filtered by a
* {@link Scan}.
@@ -291,9 +280,10 @@ public void validate() {
/**
* Gets an estimated size based on data returned from {@link #getSampleRowKeys}. The estimate
- * will be high if a {@link Scan} is set on the {@link CloudBigtableScanConfiguration}; in such
- * cases, the estimate will not take the Scan into account, and will return a larger estimate
- * than what the {@link CloudBigtableIO.Reader} will actually read.
+ * will be high if a {@link org.apache.hadoop.hbase.client.Scan} is set on the {@link
+ * CloudBigtableScanConfiguration}; in such cases, the estimate will not take the Scan into
+ * account, and will return a larger estimate than what the {@link CloudBigtableIO.Reader} will
+ * actually read.
*
* @param options The pipeline options.
* @return The estimated size of the data, in bytes.
@@ -550,8 +540,8 @@ static class Reader extends BoundedReader {
private CloudBigtableIO.AbstractSource source;
- private transient BigtableSession session;
- private transient ResultScanner scanner;
+ private transient Connection connection;
+ private transient ResultScanner scanner;
private transient Result current;
protected long workStart;
private final AtomicLong rowsRead = new AtomicLong();
@@ -578,19 +568,24 @@ public boolean start() throws IOException {
void initializeScanner() throws IOException {
Configuration config = source.getConfiguration().toHBaseConfig();
- // This will use cached data channels under the covers.
- session = new BigtableSession(BigtableOptionsFactory.fromConfiguration(config));
- scanner = session.getDataClient().readFlatRows(source.getConfiguration().getRequest());
+ connection = ConnectionFactory.createConnection(config);
+ Scan scan =
+ new Scan()
+ .withStartRow(source.getConfiguration().getZeroCopyStartRow())
+ .withStopRow(source.getConfiguration().getZeroCopyStopRow())
+ .setMaxVersions(Integer.MAX_VALUE);
+ scanner =
+ connection
+ .getTable(TableName.valueOf(source.getConfiguration().getTableId()))
+ .getScanner(scan);
}
/** Calls {@link ResultScanner#next()}. */
@Override
public boolean advance() throws IOException {
- FlatRow row = scanner.next();
- if (row != null
- && rangeTracker.tryReturnRecordAt(
- true, ByteKey.copyFrom(ByteStringer.extract(row.getRowKey())))) {
- current = FLAT_ROW_ADAPTER.adaptResponse(row);
+ Result row = scanner.next();
+ if (row != null && rangeTracker.tryReturnRecordAt(true, ByteKey.copyFrom(row.getRow()))) {
+ current = row;
rowsRead.addAndGet(1l);
return true;
} else {
@@ -674,12 +669,12 @@ public final synchronized BoundedSource splitAtFraction(double fraction)
}
@VisibleForTesting
- protected void setSession(BigtableSession session) {
- this.session = session;
+ protected void setConnection(Connection connection) {
+ this.connection = connection;
}
@VisibleForTesting
- protected void setScanner(ResultScanner scanner) {
+ protected void setScanner(ResultScanner scanner) {
this.scanner = scanner;
}
@@ -732,41 +727,6 @@ public String toString() {
///////////////////// Write Class /////////////////////////////////
- private static class MutationStatsExporter {
- protected static final Logger STATS_LOG = LoggerFactory.getLogger(AbstractSource.class);
- private static Map mutationStatsExporters = new HashMap<>();
-
- static synchronized void initializeMutationStatsExporter(BigtableInstanceName instanceName) {
- String key = instanceName.toString();
- MutationStatsExporter mutationStatsExporter = mutationStatsExporters.get(key);
- if (mutationStatsExporter == null) {
- mutationStatsExporter = new MutationStatsExporter();
- mutationStatsExporter.startExport(instanceName);
- mutationStatsExporters.put(key, mutationStatsExporter);
- }
- }
-
- protected void startExport(final BigtableInstanceName instanceName) {
- Runnable r =
- new Runnable() {
- @Override
- public void run() {
- try {
- BufferedMutatorDoFn.cumulativeThrottlingSeconds.set(
- TimeUnit.NANOSECONDS.toSeconds(
- ResourceLimiterStats.getInstance(instanceName)
- .getCumulativeThrottlingTimeNanos()));
- } catch (Exception e) {
- STATS_LOG.warn("Something bad happened in export stats", e);
- }
- }
- };
- BigtableSessionSharedThreadPools.getInstance()
- .getRetryExecutor()
- .scheduleAtFixedRate(r, 5, 5, TimeUnit.MILLISECONDS);
- }
- }
-
/**
* This is a DoFn that relies on {@link BufferedMutator} as the implementation to write data to
* Cloud Bigtable. The main function of this class is to manage Aggregators relating to mutations.
@@ -792,8 +752,7 @@ public BufferedMutatorDoFn(CloudBigtableConfiguration config) {
@Setup
public synchronized void setup() {
- MutationStatsExporter.initializeMutationStatsExporter(
- new BigtableInstanceName(config.getProjectId(), config.getInstanceId()));
+ // TODO set up buffered mutator stats exporter
}
protected BufferedMutator createBufferedMutator(Object context, String tableName)
@@ -801,7 +760,6 @@ protected BufferedMutator createBufferedMutator(Object context, String tableName
return getConnection()
.getBufferedMutator(
new BufferedMutatorParams(TableName.valueOf(tableName))
- .writeBufferSize(BulkOptions.BIGTABLE_MAX_MEMORY_DEFAULT)
.listener(createExceptionListener(context)));
}
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfiguration.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfiguration.java
index f538d0fcdc..41edfd8259 100644
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfiguration.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfiguration.java
@@ -19,9 +19,9 @@
import com.google.bigtable.repackaged.com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowRange;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowSet;
+import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.internal.NameUtil;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.internal.RequestContext;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.models.Query;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableInstanceName;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.bigtable.repackaged.com.google.protobuf.ByteString;
import com.google.cloud.bigtable.hbase.adapters.Adapters;
@@ -98,21 +98,23 @@ Builder withQuery(Query query) {
}
/**
- * Specifies the {@link ReadRowsRequest} that will be used to filter the table.
- *
+ * @deprecated Please use {@link #withScan(Scan)} instead.
+ *
Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
+ @Deprecated
public Builder withRequest(ReadRowsRequest request) {
return withRequest(StaticValueProvider.of(request));
}
/**
- * Specifies the {@link ReadRowsRequest} that will be used to filter the table.
- *
+ * @deprecated Please use {@link #withScan(Scan)} instead.
+ *
Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
+ @Deprecated
public Builder withRequest(ValueProvider request) {
this.request = request;
return this;
@@ -273,9 +275,8 @@ private static class RequestWithTableNameValueProvider
public ReadRowsRequest get() {
if (cachedRequest == null) {
if (request.get().getTableName().isEmpty()) {
- BigtableInstanceName bigtableInstanceName =
- new BigtableInstanceName(projectId.get(), instanceId.get());
- String fullTableName = bigtableInstanceName.toTableNameStr(tableId.get());
+ String fullTableName =
+ NameUtil.formatTableName(projectId.get(), instanceId.get(), tableId.get());
cachedRequest = request.get().toBuilder().setTableName(fullTableName).build();
} else {
cachedRequest = request.get();
@@ -326,6 +327,7 @@ protected CloudBigtableScanConfiguration(
*
* @return The {@link Scan}.
*/
+ @Deprecated
public ReadRowsRequest getRequest() {
return request.get();
}
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableConfigurationTest.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableConfigurationTest.java
index ffe4794d61..0288db7518 100755
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableConfigurationTest.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableConfigurationTest.java
@@ -19,7 +19,7 @@
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableVersionInfo;
+import com.google.bigtable.repackaged.com.google.cloud.bigtable.Version;
import com.google.cloud.bigtable.beam.CloudBigtableConfiguration.Builder;
import com.google.cloud.bigtable.hbase.BigtableOptionsFactory;
import java.util.ArrayList;
@@ -175,7 +175,7 @@ public void testPopulateDisplayData() {
expected.add("null:google.bigtable.project.id=my_project");
expected.add("null:google.bigtable.instance.id=instance");
expected.add("null:inaccessible=Unavailable during pipeline construction");
- expected.add("null:bigtableClientVersion=" + BigtableVersionInfo.CLIENT_VERSION);
+ expected.add("null:bigtableClientVersion=" + Version.VERSION);
MatcherAssert.assertThat(builder.itemStrings, containsInAnyOrder(expected.toArray()));
}
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOIntegrationTest.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOIntegrationTest.java
index 07a6c5e336..cf11eee7e0 100755
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOIntegrationTest.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOIntegrationTest.java
@@ -15,9 +15,9 @@
*/
package com.google.cloud.bigtable.beam;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.Logger;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.models.KeyOffset;
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
+import com.google.cloud.bigtable.hbase.util.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -66,15 +66,15 @@ public class CloudBigtableIOIntegrationTest {
private static final Logger LOG = new Logger(CloudBigtableIOIntegrationTest.class);
- private static String projectId = System.getProperty(BIGTABLE_PROJECT_KEY);
- private static String instanceId = System.getProperty(BIGTABLE_INSTANCE_KEY);
+ private static final String projectId = System.getProperty(BIGTABLE_PROJECT_KEY);
+ private static final String instanceId = System.getProperty(BIGTABLE_INSTANCE_KEY);
- private static int LARGE_VALUE_SIZE = 201326;
+ private static final int LARGE_VALUE_SIZE = 201326;
@Rule public ExpectedException expectedException = ExpectedException.none();
public static TableName newTestTableName() {
- return TableName.valueOf("test-dataflow-" + UUID.randomUUID().toString());
+ return TableName.valueOf("test-dataflow-" + UUID.randomUUID());
}
private static TableName createNewTable(Admin admin) throws IOException {
@@ -173,7 +173,7 @@ protected KV> createKV(
QUALIFIER1,
Bytes.toBytes(RandomStringUtils.randomAlphanumeric(8))));
}
- return KV.>of(tableName.getNameAsString(), mutations);
+ return KV.of(tableName.getNameAsString(), mutations);
}
private void writeThroughDataflow(DoFn writer, int insertCount) throws Exception {
@@ -228,7 +228,7 @@ private void writeViaTable(TableName tableName, int rowCount) throws IOException
QUALIFIER1,
Bytes.toBytes(RandomStringUtils.randomAlphanumeric(8))));
}
- try (Table t = connection.getTable(tableName); ) {
+ try (Table t = connection.getTable(tableName)) {
t.put(puts);
}
}
@@ -329,18 +329,15 @@ public void testEstimatedAndSplitForLargeTable() throws Exception {
try {
for (final BoundedSource bundle : bundles) {
es.submit(
- new Runnable() {
- @Override
- public void run() {
- try (BoundedReader reader = bundle.createReader(null)) {
- reader.start();
- while (reader.getCurrent() != null) {
- count.incrementAndGet();
- reader.advance();
- }
- } catch (IOException e) {
- LOG.warn("Could not read bundle: %s", e, bundle);
+ () -> {
+ try (BoundedReader reader = bundle.createReader(null)) {
+ reader.start();
+ while (reader.getCurrent() != null) {
+ count.incrementAndGet();
+ reader.advance();
}
+ } catch (IOException e) {
+ LOG.warn("Could not read bundle: %s", e, bundle);
}
});
}
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOReaderTest.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOReaderTest.java
index 8109d64def..74f8002137 100755
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOReaderTest.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOReaderTest.java
@@ -18,16 +18,17 @@
import static org.mockito.Mockito.when;
import com.google.bigtable.repackaged.com.google.bigtable.v2.ReadRowsRequest;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableSession;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.scanner.FlatRow;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.scanner.ResultScanner;
+import com.google.bigtable.repackaged.com.google.common.collect.ImmutableList;
import com.google.bigtable.repackaged.com.google.protobuf.ByteString;
import java.io.IOException;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.range.ByteKey;
import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.apache.beam.sdk.io.range.ByteKeyRangeTracker;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Rule;
@@ -44,9 +45,9 @@
public class CloudBigtableIOReaderTest {
@Rule public final MockitoRule mockitoRule = MockitoJUnit.rule();
- @Mock BigtableSession mockSession;
+ @Mock Connection mockConnection;
- @Mock ResultScanner mockScanner;
+ @Mock ResultScanner mockScanner;
@Mock CloudBigtableIO.AbstractSource mockSource;
@@ -77,7 +78,16 @@ public void testBasic() throws IOException {
private void setRowKey(String rowKey) throws IOException {
ByteString rowKeyByteString = ByteString.copyFrom(Bytes.toBytes(rowKey));
- FlatRow row = FlatRow.newBuilder().withRowKey(rowKeyByteString).build();
+ Result row =
+ Result.create(
+ ImmutableList.of(
+ new com.google.cloud.bigtable.hbase.adapters.read.RowCell(
+ Bytes.toBytes(rowKey),
+ Bytes.toBytes("cf"),
+ Bytes.toBytes("q"),
+ 10L,
+ Bytes.toBytes("value"),
+ ImmutableList.of("label"))));
when(mockScanner.next()).thenReturn(row);
}
@@ -86,7 +96,7 @@ private CloudBigtableIO.Reader initializeReader(CloudBigtableScanConfiguration c
return new CloudBigtableIO.Reader(mockSource) {
@Override
void initializeScanner() throws IOException {
- setSession(mockSession);
+ setConnection(mockConnection);
setScanner(mockScanner);
}
};
diff --git a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOTest.java b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOTest.java
index b41992004d..9e2175e8fa 100755
--- a/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOTest.java
+++ b/bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableIOTest.java
@@ -19,8 +19,8 @@
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.when;
+import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.internal.ByteStringComparator;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.data.v2.models.KeyOffset;
-import com.google.bigtable.repackaged.com.google.cloud.bigtable.util.ByteStringComparator;
import com.google.bigtable.repackaged.com.google.protobuf.ByteString;
import com.google.cloud.bigtable.beam.CloudBigtableIO.AbstractSource;
import com.google.cloud.bigtable.beam.CloudBigtableIO.Source;
diff --git a/bigtable-dataflow-parent/pom.xml b/bigtable-dataflow-parent/pom.xml
index 0046460722..a78568c2fb 100644
--- a/bigtable-dataflow-parent/pom.xml
+++ b/bigtable-dataflow-parent/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-client-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-dataflow-parent
@@ -42,7 +42,7 @@ limitations under the License.
org.apache.maven.pluginsmaven-javadoc-plugin
- 3.2.0
+ 3.3.1default-cli
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/README.md b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/README.md
index 29fa434792..98423a6e45 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/README.md
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/README.md
@@ -1,7 +1,6 @@
Bigtable Benchmark
-This module contains benchmarking test scenarios, which would be idle to check the difference between **bigtable-core** client & **GCJ veneer** client.
-after running for one method.
+This module contains benchmarking test scenarios, which use the **GCJ veneer** client.
It accepts following parameters:
@@ -11,6 +10,5 @@ $ java -jar target/benchmarks.jar \
-p projectId=[Project ID] \
-p instanceId=[Instance ID] \
-p rowShape="cellsPerRow/[#]/cellSize/[#]", ... \
- -p useBatch=false \
- -p useGCJ=true,false
+ -p useBatch=false
```
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/clirr-ignored-differences.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/clirr-ignored-differences.xml
deleted file mode 100644
index f367b556b2..0000000000
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/clirr-ignored-differences.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
- 8001
- **
-
-
\ No newline at end of file
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/pom.xml
index ecf1529ee6..8b8d70c66e 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/pom.xml
@@ -3,7 +3,7 @@
com.google.cloud.bigtablebigtable-hbase-1.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -74,7 +74,7 @@
org.apache.maven.pluginsmaven-deploy-plugin
- 3.0.0-M1
+ 3.0.0-M2true
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BenchmarkSetupUtils.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BenchmarkSetupUtils.java
index ca0b1c08ad..8c3da61847 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BenchmarkSetupUtils.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BenchmarkSetupUtils.java
@@ -18,7 +18,6 @@
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_BULK_AUTOFLUSH_MS_KEY;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_BULK_MAX_ROW_KEY_COUNT;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_USE_BATCH;
-import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_USE_GCJ_CLIENT;
import static com.google.cloud.bigtable.hbase1_x.BigtableBenchmark.COL_FAMILY;
import static com.google.cloud.bigtable.hbase1_x.BigtableBenchmark.READ_ROW_PREFIX;
import static com.google.cloud.bigtable.hbase1_x.BigtableBenchmark.SAMPLE_TIMESTAMP;
@@ -47,13 +46,11 @@ class BenchmarkSetupUtils {
private static final Pattern CELL_PATTERN = Pattern.compile("cellsPerRow/(\\d+)/cellSize/(\\d+)");
private static final Random random = new Random();
- static Connection createConnection(
- String projectId, String instanceId, boolean useBatch, boolean useGcj) {
+ static Connection createConnection(String projectId, String instanceId, boolean useBatch) {
Configuration config = BigtableConfiguration.configure(projectId, instanceId);
config.set(BIGTABLE_USE_BATCH, String.valueOf(useBatch));
config.set(BIGTABLE_BULK_AUTOFLUSH_MS_KEY, String.valueOf(100));
config.set(BIGTABLE_BULK_MAX_ROW_KEY_COUNT, String.valueOf(3000));
- config.set(BIGTABLE_USE_GCJ_CLIENT, String.valueOf(useGcj));
return BigtableConfiguration.connect(config);
}
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableBenchmark.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableBenchmark.java
index 77e771efff..13f23f256d 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableBenchmark.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-benchmarks/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableBenchmark.java
@@ -91,9 +91,6 @@ public class BigtableBenchmark {
})
private String rowShape;
- @Param({"true", "false"})
- private boolean useGcj;
-
@Param("true")
private boolean useBatch;
@@ -103,7 +100,7 @@ public class BigtableBenchmark {
@Setup
public void setUp() throws IOException {
- connection = BenchmarkSetupUtils.createConnection(projectId, instanceId, useBatch, useGcj);
+ connection = BenchmarkSetupUtils.createConnection(projectId, instanceId, useBatch);
rowShapeParams = new RowShapeParams(rowShape);
table = connection.getTable(rowShapeParams.tableName);
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-hadoop/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-hadoop/pom.xml
index c6e382689e..64e30cc10c 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-hadoop/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-hadoop/pom.xml
@@ -18,7 +18,7 @@ limitations under the License.
bigtable-hbase-1.x-parentcom.google.cloud.bigtable
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -39,7 +39,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-1.x-shaded
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -197,7 +197,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/pom.xml
index e05ae07fc9..7e609a8ebe 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-1.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-1.x-integration-tests
@@ -126,7 +126,7 @@ limitations under the License.
${project.groupId}bigtable-emulator-maven-plugin
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -210,7 +210,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test
@@ -224,7 +224,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-1.x
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test
@@ -238,7 +238,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-integration-tests-common
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test-jartest
@@ -384,7 +384,7 @@ limitations under the License.
org.apache.maven.pluginsmaven-deploy-plugin
- 3.0.0-M1
+ 3.0.0-M2true
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestAuth.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestAuth.java
index 80bb7e6fb7..81d655618e 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestAuth.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestAuth.java
@@ -34,9 +34,6 @@ public class TestAuth extends AbstractTest {
@Test
public void testBatchJwt() throws IOException {
Assume.assumeTrue("Batch JWT can only run against Bigtable", sharedTestEnv.isBigtable());
- Assume.assumeFalse(
- "GCJ client does not support cachedDataPool",
- Boolean.getBoolean("google.bigtable.use.gcj.client"));
String currentEndpoint = sharedTestEnv.getConfiguration().get("google.bigtable.endpoint.host");
Assume.assumeTrue(
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestRpcRetryBehavior.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestRpcRetryBehavior.java
index 65d7bac942..4e83e42fd7 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestRpcRetryBehavior.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestRpcRetryBehavior.java
@@ -208,7 +208,6 @@ protected Connection makeConnection(Map customConnProps) throws
}
// retry on aborted to differentiate server hang an explicit server error
config.set(BigtableOptionsFactory.ADDITIONAL_RETRY_CODES, "ABORTED");
- config.setBoolean(BigtableOptionsFactory.BIGTABLE_USE_GCJ_CLIENT, true);
config.setInt(BigtableOptionsFactory.BIGTABLE_DATA_CHANNEL_COUNT_KEY, 1);
return ConnectionFactory.createConnection(config);
}
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-mapreduce/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-mapreduce/pom.xml
index 9a8052b4c8..2fa2a342e7 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-mapreduce/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-mapreduce/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-1.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-1.x-mapreduce
@@ -36,7 +36,7 @@ limitations under the License.
1.1.0
- 0.130.2
+ 0.142.0
+ 2.0.0-beta6
@@ -225,7 +225,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6verify-mirror-deps
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-shaded/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-shaded/pom.xml
index 628d4ecd25..ee21c89b2f 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-shaded/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-shaded/pom.xml
@@ -18,7 +18,7 @@ limitations under the License.
bigtable-hbase-1.x-parentcom.google.cloud.bigtable
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -68,7 +68,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-1.x
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6com.google.cloud.bigtable
@@ -159,6 +159,7 @@ limitations under the License.
META-INF/NOTICEMETA-INF/DEPENDENCIESMETA-INF/NOTICE.txt
+ META-INF/versions/9/module-info.class
@@ -182,6 +183,7 @@ limitations under the License.
log4j:log4jorg.conscrypt:conscrypt-openjdk-uber
+ org.apache.htrace:htrace-core4
@@ -330,6 +332,10 @@ limitations under the License.
javax.annotationcom.google.bigtable.repackaged.javax.annotation
+
+ net.bytebuddy
+ com.google.bigtable.repackaged.net.bytebuddy
+
@@ -338,7 +344,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/README.md b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/README.md
index a29c384631..1a3a3fae7d 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/README.md
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/README.md
@@ -24,7 +24,7 @@ cd bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools
## Schema Translation tool
This tool will create tables in Cloud Bigtable based on the tables in an HBase cluster.
-You specifiy a [name regex](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true)
+You specify a [name regex](https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true)
and it will copy column families, garbage collection rules,
and table splits.
@@ -45,7 +45,7 @@ and table splits.
-Dgoogle.bigtable.table.filter=$TABLE_NAME_REGEX \
-Dhbase.zookeeper.quorum=$ZOOKEEPER_QUORUM \
-Dhbase.zookeeper.property.clientPort=$ZOOKEEPER_PORT \
- -jar bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar
+ -jar target/bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar
```
### Alternative: Exporting Schema
@@ -67,7 +67,7 @@ export the HBase schema to a file and use that to create tables in Cloud Bigtabl
-Dgoogle.bigtable.output.filepath=$HBASE_EXPORT_PATH \
-Dhbase.zookeeper.quorum=$ZOOKEEPER_QUORUM \
-Dhbase.zookeeper.property.clientPort=$ZOOKEEPER_PORT \
- -jar bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar
+ -jar target/bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar
```
#### Import schema
@@ -83,5 +83,36 @@ export the HBase schema to a file and use that to create tables in Cloud Bigtabl
-Dgoogle.bigtable.project.id=$PROJECT_ID \
-Dgoogle.bigtable.instance.id=$INSTANCE_ID \
-Dgoogle.bigtable.input.filepath=$SCHEMA_FILE_PATH \
- -jar bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar \
+ -jar target/bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar \
```
+
+### Table name renaming
+
+There are cases where you cannot use the HBase table name in Cloud Bigtable,
+for example, if the table is in a custom namespace. In such cases, you can provide
+a mapping from old-name->new-name to the schema translator tool, in the form of a
+JSON file. The file should contain a flat JSON map like
+
+ ```
+ {
+ "ns:hbase-tablename": "cloud-bigtable-tablename"
+ }
+ ```
+
+You can then pass a path of this file to schema translator using system
+property `google.bigtable.schema.mapping.filepath`. Schema translator will
+create a table named `cloud-bigtable-tablename` for table named
+`hbase-tablename` in namespace `ns`.
+
+ ```
+ SCHEMA_MAPPING_FILE_PATH=path/to/table-name-mapping.json
+ java \
+ -Dgoogle.bigtable.project.id=$PROJECT_ID \
+ -Dgoogle.bigtable.instance.id=$INSTANCE_ID \
+ -Dgoogle.bigtable.table.filter=$TABLE_NAME_REGEX \
+ -Dhbase.zookeeper.quorum=$ZOOKEEPER_QUORUM \
+ -Dhbase.zookeeper.property.clientPort=$ZOOKEEPER_PORT \
+ -Dgoogle.bigtable.schema.mapping.filepath=$SCHEMA_MAPPING_FILE_PATH \
+ -jar target/bigtable-hbase-1.x-tools-2.0.0-alpha1-with-dependencies.jar
+
+ ```
\ No newline at end of file
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/pom.xml
index 662f4b88b1..f8bceabb5b 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/pom.xml
@@ -5,7 +5,7 @@
bigtable-hbase-1.x-parentcom.google.cloud.bigtable
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -23,7 +23,7 @@
${project.groupId}bigtable-hbase-1.x-shaded
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
+
+
+
+
+
\ No newline at end of file
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/pom.xml b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/pom.xml
index 90ff4c5e3e..de11f81f31 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/pom.xml
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/pom.xml
@@ -32,7 +32,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-1.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-1.x
@@ -73,7 +73,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6com.google.cloud.bigtable
@@ -163,18 +163,24 @@ limitations under the License.
grpc-google-cloud-bigtable-v2compile
+
+ net.bytebuddy
+ byte-buddy
+ 1.12.6
+
+
com.google.cloud.bigtablebigtable-internal-test-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6testcom.google.truthtruth
- 1.1.2
+ 1.1.3test
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableAdmin.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableAdmin.java
index 849cf65c3b..e800e048e6 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableAdmin.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableAdmin.java
@@ -16,35 +16,31 @@
package com.google.cloud.bigtable.hbase1_x;
import com.google.api.core.InternalApi;
+import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.Futures;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
+import net.bytebuddy.ByteBuddy;
+import net.bytebuddy.implementation.InvocationHandlerAdapter;
+import net.bytebuddy.matcher.ElementMatchers;
import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AbstractBigtableAdmin;
+import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CommonConnection;
-import org.apache.hadoop.hbase.client.security.SecurityCapability;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.quotas.QuotaFilter;
-import org.apache.hadoop.hbase.quotas.QuotaRetriever;
-import org.apache.hadoop.hbase.quotas.QuotaSettings;
-import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
/**
* This is an hbase 1.x implementation of {@link AbstractBigtableAdmin}. Most methods in this class
@@ -54,7 +50,7 @@
*/
@InternalApi("For internal usage only")
@SuppressWarnings("deprecation")
-public class BigtableAdmin extends AbstractBigtableAdmin {
+public abstract class BigtableAdmin extends AbstractBigtableAdmin {
public BigtableAdmin(CommonConnection connection) throws IOException {
super(connection);
@@ -85,20 +81,6 @@ public void enableTableAsync(TableName tableName) throws IOException {
enableTable(tableName);
}
- /** {@inheritDoc} */
- @Override
- public AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(TableName tableName)
- throws IOException {
- throw new UnsupportedOperationException("getCompactionState");
- }
-
- /** {@inheritDoc} */
- @Override
- public AdminProtos.GetRegionInfoResponse.CompactionState getCompactionStateForRegion(byte[] bytes)
- throws IOException {
- throw new UnsupportedOperationException("getCompactionStateForRegion");
- }
-
/**
* {@inheritDoc}
*
@@ -120,19 +102,6 @@ public void snapshot(HBaseProtos.SnapshotDescription snapshot)
snapshot(snapshot.getName(), TableName.valueOf(snapshot.getTable()));
}
- /** {@inheritDoc} */
- @Override
- public boolean isSnapshotFinished(HBaseProtos.SnapshotDescription snapshot)
- throws IOException, HBaseSnapshotException, UnknownSnapshotException {
- throw new UnsupportedOperationException("isSnapshotFinished"); // TODO
- }
-
- @Override
- public void restoreSnapshot(String s, boolean b, boolean b1)
- throws IOException, RestoreSnapshotException {
- throw new UnsupportedOperationException("restoreSnapshot"); // TODO
- }
-
@Override
public void cloneSnapshot(String s, TableName tableName, boolean b)
throws IOException, TableExistsException, RestoreSnapshotException {
@@ -179,125 +148,6 @@ public List listSnapshots(Pattern pattern) thro
return response;
}
- @Override
- public List listTableSnapshots(
- String tableNameRegex, String snapshotNameRegex) throws IOException {
- throw new UnsupportedOperationException("Unsupported - please use listSnapshots");
- }
-
- @Override
- public List listTableSnapshots(
- Pattern tableNamePattern, Pattern snapshotNamePattern) throws IOException {
- throw new UnsupportedOperationException("Unsupported - please use listSnapshots");
- }
-
- @Override
- public boolean isBalancerEnabled() throws IOException {
- throw new UnsupportedOperationException("isBalancerEnabled"); // TODO
- }
-
- @Override
- public long getLastMajorCompactionTimestamp(TableName tableName) throws IOException {
- throw new UnsupportedOperationException("getLastMajorCompactionTimestamp"); // TODO
- }
-
- @Override
- public long getLastMajorCompactionTimestampForRegion(byte[] regionName) throws IOException {
- throw new UnsupportedOperationException("getLastMajorCompactionTimestampForRegion"); // TODO
- }
-
- @Override
- public void setQuota(QuotaSettings quota) throws IOException {
- throw new UnsupportedOperationException("setQuota"); // TODO
- }
-
- @Override
- public QuotaRetriever getQuotaRetriever(QuotaFilter filter) throws IOException {
- throw new UnsupportedOperationException("getQuotaRetriever"); // TODO
- }
-
- @Override
- public boolean normalize() throws IOException {
- throw new UnsupportedOperationException("normalize"); // TODO
- }
-
- @Override
- public boolean isNormalizerEnabled() throws IOException {
- throw new UnsupportedOperationException("isNormalizerEnabled"); // TODO
- }
-
- @Override
- public boolean setNormalizerRunning(boolean on) throws IOException {
- throw new UnsupportedOperationException("setNormalizerRunning"); // TODO
- }
-
- @Override
- public boolean setCleanerChoreRunning(boolean b) throws IOException {
- throw new UnsupportedOperationException("setCleanerChoreRunning"); // TODO
- }
-
- @Override
- public boolean runCleanerChore() throws IOException {
- throw new UnsupportedOperationException("runCleanerChore"); // TODO
- }
-
- @Override
- public boolean isCleanerChoreEnabled() throws IOException {
- throw new UnsupportedOperationException("isCleanerChoreEnabled"); // TODO
- }
-
- @Override
- public boolean isMasterInMaintenanceMode() throws IOException {
- throw new UnsupportedOperationException("isMasterInMaintenanceMode"); // TODO
- }
-
- @Override
- public boolean abortProcedure(long procId, boolean mayInterruptIfRunning) throws IOException {
- throw new UnsupportedOperationException("abortProcedure"); // TODO
- }
-
- @Override
- public ProcedureInfo[] listProcedures() throws IOException {
- throw new UnsupportedOperationException("listProcedures"); // TODO
- }
-
- @Override
- public Future abortProcedureAsync(long procId, boolean mayInterruptIfRunning)
- throws IOException {
- throw new UnsupportedOperationException("abortProcedureAsync"); // TODO
- }
-
- @Override
- public List getSecurityCapabilities() throws IOException {
- throw new UnsupportedOperationException("getSecurityCapabilities"); // TODO
- }
-
- @Override
- public boolean balancer(boolean arg0) throws IOException {
- throw new UnsupportedOperationException("balancer"); // TODO
- }
-
- @Override
- public boolean isSplitOrMergeEnabled(MasterSwitchType arg0) throws IOException {
- throw new UnsupportedOperationException("isSplitOrMergeEnabled"); // TODO
- }
-
- @Override
- public List listDeadServers() throws IOException {
- throw new UnsupportedOperationException("listDeadServers"); // TODO
- }
-
- @Override
- public List clearDeadServers(List list) throws IOException {
- throw new UnsupportedOperationException("clearDeadServers"); // TODO
- }
-
- @Override
- public boolean[] setSplitOrMergeEnabled(boolean arg0, boolean arg1, MasterSwitchType... arg2)
- throws IOException {
- throw new UnsupportedOperationException("setSplitOrMergeEnabled"); // TODO
- }
-
@Override
public ClusterStatus getClusterStatus() throws IOException {
return new ClusterStatus() {
@@ -307,4 +157,44 @@ public Collection getServers() {
}
};
}
+
+ private static Class extends BigtableAdmin> adminClass = null;
+
+ /**
+ * This is a workaround for incompatible changes in hbase minor versions. Dynamically generates a
+ * class that extends BigtableAdmin so incompatible methods won't be accessed unless the methods
+ * are called. If a method is implemented by BigtableAdmin, the generated class will invoke the
+ * implementation in BigtableAdmin. Otherwise it'll throw {@link UnsupportedOperationException}.
+ */
+ private static synchronized Class extends BigtableAdmin> getSubclass() {
+ if (adminClass == null) {
+ adminClass =
+ new ByteBuddy()
+ .subclass(BigtableAdmin.class)
+ .method(ElementMatchers.isAbstract())
+ .intercept(
+ InvocationHandlerAdapter.of(
+ new AbstractBigtableAdmin.UnsupportedOperationsHandler()))
+ .make()
+ .load(BigtableAdmin.class.getClassLoader())
+ .getLoaded();
+ }
+ return adminClass;
+ }
+
+ public static Admin createInstance(CommonConnection connection) throws IOException {
+ try {
+ return getSubclass().getDeclaredConstructor(CommonConnection.class).newInstance(connection);
+ } catch (InvocationTargetException e) {
+ // Unwrap and throw IOException or RuntimeException as is, and convert all other exceptions to
+ // IOException because
+ // org.apache.hadoop.hbase.client.Connection#getAdmin() only throws
+ // IOException
+ Throwables.throwIfInstanceOf(e.getTargetException(), IOException.class);
+ Throwables.throwIfInstanceOf(e.getTargetException(), RuntimeException.class);
+ throw new IOException(e);
+ } catch (Exception e) {
+ throw new IOException(e);
+ }
+ }
}
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableConnection.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableConnection.java
index 0ae8134a0a..ab6c7a9713 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableConnection.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/main/java/com/google/cloud/bigtable/hbase1_x/BigtableConnection.java
@@ -58,7 +58,12 @@ public BigtableConnection(Configuration conf) throws IOException {
/** {@inheritDoc} */
@Override
public Admin getAdmin() throws IOException {
- return new BigtableAdmin(this);
+ return BigtableAdmin.createInstance(this);
+ }
+
+ @Override
+ public String getClusterId() throws IOException {
+ throw new UnsupportedOperationException("not implemented");
}
protected SampledRowKeysAdapter createSampledRowKeysAdapter(
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestBigtableAdmin.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestBigtableAdmin.java
index 6b2bf670b6..b239d82eb8 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestBigtableAdmin.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestBigtableAdmin.java
@@ -17,6 +17,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import com.google.bigtable.admin.v2.BigtableTableAdminGrpc;
import com.google.bigtable.admin.v2.DropRowRangeRequest;
@@ -94,6 +95,15 @@ public void testDeleteRowRangeByPrefixNonUtf8() throws IOException, InterruptedE
assertEquals(expectedRequest, requestQueue.take());
}
+ @Test
+ public void testUnimplementedMethod() throws Exception {
+ try {
+ admin.getCompactionState(TableName.valueOf(TABLE_ID));
+ } catch (UnsupportedOperationException e) {
+ assertTrue(e.getMessage().contains("getCompactionState"));
+ }
+ }
+
private class RequestInterceptor implements ServerInterceptor {
@Override
public ServerCall.Listener interceptCall(
diff --git a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestMetrics.java b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestMetrics.java
index f702238f31..ab1ebbf66f 100644
--- a/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestMetrics.java
+++ b/bigtable-hbase-1.x-parent/bigtable-hbase-1.x/src/test/java/com/google/cloud/bigtable/hbase1_x/TestMetrics.java
@@ -107,7 +107,6 @@ public void setUp() throws IOException {
configuration.set(BigtableOptionsFactory.BIGTABLE_DATA_CHANNEL_COUNT_KEY, "1");
configuration.set(
BigtableOptionsFactory.BIGTABLE_EMULATOR_HOST_KEY, "localhost:" + server.getPort());
- configuration.set(BigtableOptionsFactory.BIGTABLE_USE_GCJ_CLIENT, "true");
fakeMetricRegistry = new FakeMetricRegistry();
BigtableClientMetrics.setMetricRegistry(fakeMetricRegistry);
@@ -297,7 +296,6 @@ public void testActiveSessionsAndChannels() throws IOException {
configuration.set(BigtableOptionsFactory.BIGTABLE_NULL_CREDENTIAL_ENABLE_KEY, "true");
configuration.set(
BigtableOptionsFactory.BIGTABLE_DATA_CHANNEL_COUNT_KEY, String.valueOf(connectionCount));
- configuration.set(BigtableOptionsFactory.BIGTABLE_USE_GCJ_CLIENT, "true");
BigtableConnection newConnection = new BigtableConnection(configuration);
currentActiveSessions =
@@ -325,7 +323,6 @@ public void testChannelPoolCachingActiveChannel() throws Exception {
configuration.set(BigtableOptionsFactory.INSTANCE_ID_KEY, TEST_INSTANCE_ID);
configuration.set(
BigtableOptionsFactory.BIGTABLE_DATA_CHANNEL_COUNT_KEY, String.valueOf(connectionCount));
- configuration.set(BigtableOptionsFactory.BIGTABLE_USE_GCJ_CLIENT, "true");
configuration.set(BigtableOptionsFactory.BIGTABLE_USE_CACHED_DATA_CHANNEL_POOL, "true");
Credentials credentials = NoCredentialsProvider.create().getCredentials();
configuration = BigtableConfiguration.withCredentials(configuration, credentials);
diff --git a/bigtable-hbase-1.x-parent/pom.xml b/bigtable-hbase-1.x-parent/pom.xml
index 0f2529256f..3e87e1fd0c 100644
--- a/bigtable-hbase-1.x-parent/pom.xml
+++ b/bigtable-hbase-1.x-parent/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-client-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-1.x-parent
diff --git a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-hadoop/pom.xml b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-hadoop/pom.xml
index a229e90938..af57d63a4d 100644
--- a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-hadoop/pom.xml
+++ b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-hadoop/pom.xml
@@ -18,7 +18,7 @@ limitations under the License.
bigtable-hbase-2.x-parentcom.google.cloud.bigtable
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -39,7 +39,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-2.x-shaded
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6org.apache.hbase
@@ -203,7 +203,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
diff --git a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-integration-tests/pom.xml b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-integration-tests/pom.xml
index 83130fa6c8..16156e1351 100644
--- a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-integration-tests/pom.xml
+++ b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-integration-tests/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-2.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6bigtable-hbase-2.x-integration-tests
@@ -130,7 +130,7 @@ limitations under the License.
${project.groupId}bigtable-emulator-maven-plugin
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -207,7 +207,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test
@@ -221,7 +221,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-2.x
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test
@@ -235,7 +235,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-integration-tests-common
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6test-jartest
@@ -274,7 +274,6 @@ limitations under the License.
com.google.guavaguava
- ${guava.version}test
@@ -321,7 +320,7 @@ limitations under the License.
org.apache.maven.pluginsmaven-deploy-plugin
- 3.0.0-M1
+ 3.0.0-M2true
diff --git a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-shaded/pom.xml b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-shaded/pom.xml
index 15ba8b0963..1c72321366 100644
--- a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-shaded/pom.xml
+++ b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x-shaded/pom.xml
@@ -18,7 +18,7 @@ limitations under the License.
bigtable-hbase-2.x-parentcom.google.cloud.bigtable
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta64.0.0
@@ -58,7 +58,7 @@ limitations under the License.
${project.groupId}bigtable-hbase-2.x
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6com.google.cloud.bigtable
@@ -152,6 +152,7 @@ limitations under the License.
META-INF/NOTICEMETA-INF/DEPENDENCIESMETA-INF/NOTICE.txt
+ META-INF/versions/9/module-info.class
@@ -321,6 +322,10 @@ limitations under the License.
javax.annotationcom.google.bigtable.repackaged.javax.annotation
+
+ net.bytebuddy
+ com.google.bigtable.repackaged.net.bytebuddy
+
@@ -329,7 +334,7 @@ limitations under the License.
com.google.cloud.bigtable.testbigtable-build-helper
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
diff --git a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/pom.xml b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/pom.xml
index d784a4ef49..b1c8eafd85 100644
--- a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/pom.xml
+++ b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/pom.xml
@@ -20,7 +20,7 @@ limitations under the License.
com.google.cloud.bigtablebigtable-hbase-2.x-parent
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -60,7 +60,7 @@ limitations under the License.
${project.groupId}bigtable-hbase
- 2.0.0-alpha3-SNAPSHOT
+ 2.0.0-beta6
@@ -142,6 +142,11 @@ limitations under the License.
org.threetenthreetenbp
+
+ net.bytebuddy
+ byte-buddy
+ 1.12.6
+
diff --git a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/src/main/java/com/google/cloud/bigtable/hbase2_x/BigtableAdmin.java b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/src/main/java/com/google/cloud/bigtable/hbase2_x/BigtableAdmin.java
index b6ffdc71c0..9892550871 100644
--- a/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/src/main/java/com/google/cloud/bigtable/hbase2_x/BigtableAdmin.java
+++ b/bigtable-hbase-2.x-parent/bigtable-hbase-2.x/src/main/java/com/google/cloud/bigtable/hbase2_x/BigtableAdmin.java
@@ -18,57 +18,38 @@
import com.google.api.core.InternalApi;
import com.google.cloud.bigtable.hbase.util.ModifyTableBuilder;
import com.google.cloud.bigtable.hbase2_x.adapters.admin.TableAdapter2x;
+import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.Futures;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import java.util.Objects;
-import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Future;
import java.util.regex.Pattern;
-import org.apache.hadoop.hbase.CacheEvictionStats;
+import net.bytebuddy.ByteBuddy;
+import net.bytebuddy.implementation.InvocationHandlerAdapter;
+import net.bytebuddy.matcher.ElementMatchers;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.RegionMetrics;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AbstractBigtableAdmin;
import org.apache.hadoop.hbase.client.AbstractBigtableConnection;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-import org.apache.hadoop.hbase.client.CompactType;
-import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.client.replication.TableCFs;
-import org.apache.hadoop.hbase.client.security.SecurityCapability;
-import org.apache.hadoop.hbase.quotas.QuotaFilter;
-import org.apache.hadoop.hbase.quotas.QuotaRetriever;
-import org.apache.hadoop.hbase.quotas.QuotaSettings;
-import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshotView;
-import org.apache.hadoop.hbase.replication.ReplicationException;
-import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-import org.apache.hadoop.hbase.security.access.GetUserPermissionsRequest;
-import org.apache.hadoop.hbase.security.access.Permission;
-import org.apache.hadoop.hbase.security.access.UserPermission;
-import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -77,13 +58,13 @@
*
For internal use only - public for technical reasons.
*/
@InternalApi("For internal usage only")
-public class BigtableAdmin extends AbstractBigtableAdmin {
+public abstract class BigtableAdmin extends AbstractBigtableAdmin {
private final BigtableAsyncAdmin asyncAdmin;
public BigtableAdmin(AbstractBigtableConnection connection) throws IOException {
super(connection);
- asyncAdmin = new BigtableAsyncAdmin(connection);
+ asyncAdmin = BigtableAsyncAdmin.createInstance(connection);
}
/** {@inheritDoc} */
@@ -151,11 +132,6 @@ public List listSnapshots() throws IOException {
return response;
}
- @Override
- public void deleteSnapshots(Pattern pattern) throws IOException {
- throw new UnsupportedOperationException("use deleteSnapshot instead");
- }
-
/**
* {@inheritDoc}
*
@@ -302,18 +278,6 @@ public List listTableDescriptorsByNamespace(byte[] namespace)
return Arrays.asList(listTableDescriptorsByNamespace(namespaceStr));
}
- @Override
- public List listTableSnapshots(String tableName, String snapshotId)
- throws IOException {
- throw new UnsupportedOperationException("Unsupported - please use listSnapshots");
- }
-
- @Override
- public List listTableSnapshots(Pattern tableName, Pattern snapshotName)
- throws IOException {
- throw new UnsupportedOperationException("Unsupported - please use listSnapshots");
- }
-
@Override
public Future modifyColumnFamilyAsync(
TableName tableName, ColumnFamilyDescriptor columnFamily) throws IOException {
@@ -365,229 +329,6 @@ public Future truncateTableAsync(TableName tableName, boolean preserveSpli
return ApiFutureUtils.toCompletableFuture(
adminClientWrapper.dropAllRowsAsync(tableName.getNameAsString()));
}
- /* ******* Unsupported methods *********** */
-
- @Override
- public boolean abortProcedure(long arg0, boolean arg1) throws IOException {
- throw new UnsupportedOperationException("abortProcedure");
- }
-
- @Override
- public Future abortProcedureAsync(long arg0, boolean arg1) throws IOException {
- throw new UnsupportedOperationException("abortProcedureAsync");
- }
-
- @Override
- public boolean balance() throws IOException {
- throw new UnsupportedOperationException("balance");
- }
-
- @Override
- public boolean balance(boolean arg0) throws IOException {
- throw new UnsupportedOperationException("balance");
- }
-
- @Override
- public boolean balancerSwitch(boolean arg0, boolean arg1) throws IOException {
- throw new UnsupportedOperationException("balancerSwitch");
- }
-
- @Override
- public boolean catalogJanitorSwitch(boolean arg0) throws IOException {
- throw new UnsupportedOperationException("catalogJanitorSwitch");
- }
-
- @Override
- public boolean cleanerChoreSwitch(boolean arg0) throws IOException {
- throw new UnsupportedOperationException("cleanerChoreSwitch");
- }
-
- @Override
- public void clearCompactionQueues(ServerName arg0, Set arg1)
- throws IOException, InterruptedException {
- throw new UnsupportedOperationException("clearCompactionQueues");
- }
-
- @Override
- public List clearDeadServers(List arg0) throws IOException {
- throw new UnsupportedOperationException("clearDeadServers");
- }
-
- @Override
- public void cloneSnapshot(String arg0, TableName arg1, boolean arg2)
- throws IOException, TableExistsException, RestoreSnapshotException {
- throw new UnsupportedOperationException("cloneSnapshot");
- }
-
- @Override
- public Future cloneSnapshotAsync(String arg0, TableName arg1)
- throws IOException, TableExistsException {
- throw new UnsupportedOperationException("cloneSnapshotAsync");
- }
-
- @Override
- public void cloneTableSchema(TableName tableName, TableName tableName1, boolean b) {
- throw new UnsupportedOperationException("cloneTableSchema"); // TODO
- }
-
- @Override
- public boolean switchRpcThrottle(boolean enable) throws IOException {
- throw new UnsupportedOperationException("switchRpcThrottle");
- }
-
- @Override
- public boolean isRpcThrottleEnabled() throws IOException {
- throw new UnsupportedOperationException("isRpcThrottleEnabled");
- }
-
- @Override
- public boolean exceedThrottleQuotaSwitch(boolean b) throws IOException {
- throw new UnsupportedOperationException("exceedThrottleQuotaSwitch");
- }
-
- @Override
- public Map getSpaceQuotaTableSizes() throws IOException {
- throw new UnsupportedOperationException("getSpaceQuotaTableSizes");
- }
-
- @Override
- public Map getRegionServerSpaceQuotaSnapshots(
- ServerName serverName) throws IOException {
- throw new UnsupportedOperationException("getRegionServerSpaceQuotaSnapshots");
- }
-
- @Override
- public SpaceQuotaSnapshotView getCurrentSpaceQuotaSnapshot(String namespace) throws IOException {
- throw new UnsupportedOperationException("getCurrentSpaceQuotaSnapshot");
- }
-
- @Override
- public SpaceQuotaSnapshotView getCurrentSpaceQuotaSnapshot(TableName tableName)
- throws IOException {
- throw new UnsupportedOperationException("getCurrentSpaceQuotaSnapshot");
- }
-
- @Override
- public void grant(UserPermission userPermission, boolean mergeExistingPermissions)
- throws IOException {
- throw new UnsupportedOperationException("grant");
- }
-
- @Override
- public void revoke(UserPermission userPermission) throws IOException {
- throw new UnsupportedOperationException("revoke");
- }
-
- @Override
- public List getUserPermissions(
- GetUserPermissionsRequest getUserPermissionsRequest) throws IOException {
- throw new UnsupportedOperationException("getUserPermissions");
- }
-
- @Override
- public List hasUserPermissions(String userName, List permissions)
- throws IOException {
- throw new UnsupportedOperationException("hasUserPermissions");
- }
-
- @Override
- public void compact(TableName arg0, CompactType arg1) throws IOException, InterruptedException {
- throw new UnsupportedOperationException("compact");
- }
-
- @Override
- public void compact(TableName arg0, byte[] arg1, CompactType arg2)
- throws IOException, InterruptedException {
- throw new UnsupportedOperationException("compact");
- }
-
- @Override
- public Future createNamespaceAsync(NamespaceDescriptor arg0) throws IOException {
- throw new UnsupportedOperationException("createNamespaceAsync");
- }
-
- @Override
- public void decommissionRegionServers(List arg0, boolean arg1) throws IOException {
- throw new UnsupportedOperationException("decommissionRegionServers");
- }
-
- @Override
- public void disableTableReplication(TableName arg0) throws IOException {
- throw new UnsupportedOperationException("disableTableReplication");
- }
-
- @Override
- public void enableTableReplication(TableName arg0) throws IOException {
- throw new UnsupportedOperationException("enableTableReplication");
- }
-
- @Override
- public Future enableReplicationPeerAsync(String s) {
- throw new UnsupportedOperationException("enableTableReplication");
- }
-
- @Override
- public Future disableReplicationPeerAsync(String s) {
- throw new UnsupportedOperationException("disableReplicationPeerAsync");
- }
-
- @Override
- public byte[] execProcedureWithReturn(String arg0, String arg1, Map arg2)
- throws IOException {
- throw new UnsupportedOperationException("execProcedureWithReturn");
- }
-
- @Override
- public CompactionState getCompactionState(TableName arg0) throws IOException {
- throw new UnsupportedOperationException("getCompactionState");
- }
-
- @Override
- public CompactionState getCompactionState(TableName arg0, CompactType arg1) throws IOException {
- throw new UnsupportedOperationException("getCompactionState");
- }
-
- @Override
- public CompactionState getCompactionStateForRegion(byte[] arg0) throws IOException {
- throw new UnsupportedOperationException("getCompactionStateForRegion");
- }
-
- @Override
- public long getLastMajorCompactionTimestamp(TableName arg0) throws IOException {
- throw new UnsupportedOperationException("getLastMajorCompactionTimestamp");
- }
-
- @Override
- public long getLastMajorCompactionTimestampForRegion(byte[] arg0) throws IOException {
- throw new UnsupportedOperationException("getLastMajorCompactionTimestamp");
- }
-
- @Override
- public String getLocks() throws IOException {
- // TODO : new in 2.0
- throw new UnsupportedOperationException("getLocks");
- }
-
- @Override
- public String getProcedures() throws IOException {
- // TODO : new in 2.0
- throw new UnsupportedOperationException("getProcedures");
- }
-
- @Override
- public QuotaRetriever getQuotaRetriever(QuotaFilter arg0) throws IOException {
- throw new UnsupportedOperationException("getQuotaRetriever");
- }
-
- @Override
- public List getRegions(ServerName arg0) throws IOException {
- throw new UnsupportedOperationException("getRegions");
- }
-
- @Override
- public void flushRegionServer(ServerName serverName) throws IOException {
- throw new UnsupportedOperationException("flushRegionServer");
- }
@Override
public List getRegions(TableName tableName) throws IOException {
@@ -598,273 +339,11 @@ public List getRegions(TableName tableName) throws IOException {
return regionInfo;
}
- @Override
- public List getSecurityCapabilities() throws IOException {
- throw new UnsupportedOperationException("getSecurityCapabilities");
- }
-
- @Override
- public boolean isBalancerEnabled() throws IOException {
- throw new UnsupportedOperationException("isBalancerEnabled");
- }
-
- @Override
- public boolean isCleanerChoreEnabled() throws IOException {
- throw new UnsupportedOperationException("isCleanerChoreEnabled");
- }
-
- @Override
- public boolean isMasterInMaintenanceMode() throws IOException {
- throw new UnsupportedOperationException("isMasterInMaintenanceMode");
- }
-
- @Override
- public boolean isNormalizerEnabled() throws IOException {
- throw new UnsupportedOperationException("isNormalizerEnabled");
- }
-
- @Override
- public boolean isSnapshotFinished(SnapshotDescription arg0)
- throws IOException, HBaseSnapshotException, UnknownSnapshotException {
- throw new UnsupportedOperationException("isSnapshotFinished");
- }
-
- @Override
- public List listDeadServers() throws IOException {
- throw new UnsupportedOperationException("listDeadServers");
- }
-
- @Override
- public List listDecommissionedRegionServers() throws IOException {
- throw new UnsupportedOperationException("listDecommissionedRegionServers");
- }
-
- @Override
- public List listReplicatedTableCFs() throws IOException {
- throw new UnsupportedOperationException("listReplicatedTableCFs");
- }
-
- @Override
- public void majorCompact(TableName arg0, CompactType arg1)
- throws IOException, InterruptedException {
- throw new UnsupportedOperationException("majorCompact"); // TODO
- }
-
- @Override
- public void majorCompact(TableName arg0, byte[] arg1, CompactType arg2)
- throws IOException, InterruptedException {
- throw new UnsupportedOperationException("majorCompact"); // TODO
- }
-
- @Override
- public Map compactionSwitch(
- boolean switchState, List serverNamesList) throws IOException {
- throw new UnsupportedOperationException("compactionSwitch");
- }
-
- @Override
- public Future mergeRegionsAsync(byte[][] arg0, boolean arg1) throws IOException {
- throw new UnsupportedOperationException("mergeRegionsAsync"); // TODO
- }
-
- @Override
- public Future splitRegionAsync(byte[] regionName) throws IOException {
- throw new UnsupportedOperationException("splitRegionAsync");
- }
-
- @Override
- public Future mergeRegionsAsync(byte[] arg0, byte[] arg1, boolean arg2) throws IOException {
- throw new UnsupportedOperationException("mergeRegionsAsync"); // TODO
- }
-
- @Override
- public Future modifyNamespaceAsync(NamespaceDescriptor arg0) throws IOException {
- throw new UnsupportedOperationException("modifyNamespaceAsync"); // TODO
- }
-
- @Override
- public boolean normalize() throws IOException {
- throw new UnsupportedOperationException("normalize"); // TODO
- }
-
- @Override
- public boolean normalizerSwitch(boolean arg0) throws IOException {
- throw new UnsupportedOperationException("normalizerSwitch"); // TODO
- }
-
- @Override
- public void recommissionRegionServer(ServerName arg0, List arg1) throws IOException {
- throw new UnsupportedOperationException("recommissionRegionServer"); // TODO
- }
-
- @Override
- public void restoreSnapshot(String arg0, boolean arg1, boolean arg2)
- throws IOException, RestoreSnapshotException {
- throw new UnsupportedOperationException("restoreSnapshot"); // TODO
- }
-
- @Override
- public Future restoreSnapshotAsync(String arg0)
- throws IOException, RestoreSnapshotException {
- throw new UnsupportedOperationException("restoreSnapshotAsync"); // TODO
- }
-
- @Override
- public int runCatalogJanitor() throws IOException {
- throw new UnsupportedOperationException("runCatalogJanitor"); // TODO
- }
-
- @Override
- public boolean runCleanerChore() throws IOException {
- throw new UnsupportedOperationException("runCleanerChore"); // TODO
- }
-
- @Override
- public void setQuota(QuotaSettings arg0) throws IOException {
- throw new UnsupportedOperationException("setQuota"); // TODO
- }
-
- @Override
- public Future splitRegionAsync(byte[] arg0, byte[] arg1) throws IOException {
- throw new UnsupportedOperationException("splitRegionAsync"); // TODO
- }
-
- @Override
- public void addReplicationPeer(String arg0, ReplicationPeerConfig arg1, boolean arg2)
- throws IOException {
- throw new UnsupportedOperationException("addReplicationPeer"); // TODO
- }
-
- @Override
- public Future addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig) {
- throw new UnsupportedOperationException("addReplicationPeerAsync"); // TODO
- }
-
- @Override
- public Future addReplicationPeerAsync(
- String s, ReplicationPeerConfig replicationPeerConfig, boolean b) {
- throw new UnsupportedOperationException("addReplicationPeerAsync"); // TODO
- }
-
- @Override
- public void appendReplicationPeerTableCFs(String arg0, Map> arg1)
- throws ReplicationException, IOException {
- throw new UnsupportedOperationException("appendReplicationPeerTableCFs"); // TODO
- }
-
- @Override
- public CacheEvictionStats clearBlockCache(TableName arg0) throws IOException {
- throw new UnsupportedOperationException("clearBlockCache"); // TODOv
- }
-
- @Override
- public void compactRegionServer(ServerName arg0) throws IOException {
- throw new UnsupportedOperationException("splitRegionAsync"); // TODO
- }
-
- @Override
- public void disableReplicationPeer(String arg0) throws IOException {
- throw new UnsupportedOperationException("disableReplicationPeer"); // TODO
- }
-
- @Override
- public void enableReplicationPeer(String arg0) throws IOException {
- throw new UnsupportedOperationException("enableReplicationPeer"); // TODO
- }
-
@Override
public ClusterMetrics getClusterMetrics(EnumSet