Merge pull request #1932 from GoogleCloudPlatform/bigtable-v2
Land Bigtable v2
tseaver authored Jun 29, 2016
2 parents 7f65402 + d085294 commit 1a2fa6c
Showing 59 changed files with 12,049 additions and 2,741 deletions.
1 change: 1 addition & 0 deletions .coveragerc
@@ -1,6 +1,7 @@
[report]
omit =
*/_generated/*.py
*/_generated_v2/*.py
show_missing = True
exclude_lines =
# Re-enable the standard pragma
1 change: 1 addition & 0 deletions .gitignore
@@ -57,3 +57,4 @@ scripts/pylintrc_reduced
generated_python/
cloud-bigtable-client/
googleapis-pb/
grpc_python_venv/
56 changes: 24 additions & 32 deletions Makefile → Makefile.bigtable_v1
@@ -1,10 +1,11 @@
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
GENERATED_DIR=$(shell pwd)/generated_python
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/_generated
DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
GRPC_PLUGIN=grpc_python_plugin
PROTOC_CMD=protoc
BIGTABLE_PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
GENERATED_SUBDIR=_generated
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/$(GENERATED_SUBDIR)
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb
BIGTABLE_CHECKOUT_DIR=$(shell pwd)/cloud-bigtable-client
BIGTABLE_PROTOS_DIR=$(BIGTABLE_CHECKOUT_DIR)/bigtable-client-core-parent/bigtable-protos/src/main/proto

help:
@echo 'Makefile for gcloud-python Bigtable protos '
@@ -14,19 +15,22 @@ help:
@echo ' make clean Clean generated files '

generate:
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
# Retrieve git repos that have our *.proto files.
[ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
cd cloud-bigtable-client && git pull origin master
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
cd googleapis-pb && git pull origin master
[ -d $(BIGTABLE_CHECKOUT_DIR) ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
cd $(BIGTABLE_CHECKOUT_DIR) && git pull origin master
[ -d $(GOOGLEAPIS_PROTOS_DIR) ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
cd $(GOOGLEAPIS_PROTOS_DIR) && git pull origin master
# Make the directory where our *_pb2.py files will go.
mkdir -p $(GENERATED_DIR)
# Generate all *_pb2.py files that require gRPC.
$(PROTOC_CMD) \
--proto_path=$(BIGTABLE_PROTOS_DIR) \
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
--plugin=protoc-gen-grpc=$(GRPC_PLUGIN) \
--grpc_out=$(GENERATED_DIR) \
--grpc_python_out=$(GENERATED_DIR) \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
@@ -41,49 +45,37 @@ generate:
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
# Move the newly generated *_pb2.py files into our library.
mv $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
cp $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
cp $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
cp $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
# Remove all existing *.proto files before we replace
rm -f $(BIGTABLE_DIR)/*.proto
rm -f $(DATASTORE_DIR)/*.proto
# Copy over the *.proto files into our library.
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
cp $(GOOGLEAPIS_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
# Rename all *.proto files in our library with an
# underscore and remove executable bit.
cd $(BIGTABLE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
done
cd $(DATASTORE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
done
# Separate the gRPC parts of the operations service from the
# non-gRPC parts so that the protos from `googleapis-common-protos`
# can be used without gRPC.
python scripts/make_operations_grpc.py
# Separate the gRPC parts of the datastore service from the
# non-gRPC parts so that the protos can be used without gRPC.
python scripts/make_datastore_grpc.py
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
python scripts/make_operations_grpc.py
# Rewrite the imports in the generated *_pb2.py files.
python scripts/rewrite_imports.py
python scripts/rewrite_imports.py $(BIGTABLE_DIR)/*pb2.py

check_generate:
python scripts/check_generate.py

clean:
rm -fr cloud-bigtable-client $(GENERATED_DIR)
rm -fr $(GRPCIO_VIRTUALENV) $(GOOGLEAPIS_PROTOS_DIR) $(GENERATED_DIR)

.PHONY: generate check_generate clean
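
The switch from a system ``protoc`` plus ``grpc_python_plugin`` to ``$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc`` means code generation now runs through the protoc compiler bundled with grpcio-tools. The same call can be scripted in Python; the sketch below is illustrative only, assumes grpcio-tools is installed in the active interpreter and exposes a ``main()`` entry point on the ``grpc.tools.protoc`` module (spelled ``grpc_tools.protoc`` in later releases), and shows just one of the service protos from the generate target.

.. code:: python

    # Illustrative sketch: programmatic equivalent of one $(PROTOC_CMD)
    # invocation from the generate target. Assumes grpcio-tools is installed
    # and importable as grpc.tools.protoc (grpc_tools.protoc in later
    # releases); the paths mirror the Makefile variables.
    from grpc.tools import protoc

    exit_code = protoc.main([
        'protoc',  # argv[0] placeholder expected by protoc.main()
        '--proto_path=cloud-bigtable-client/bigtable-client-core-parent/bigtable-protos/src/main/proto',
        '--proto_path=googleapis-pb',
        '--python_out=generated_python',
        '--grpc_python_out=generated_python',
        'cloud-bigtable-client/bigtable-client-core-parent/bigtable-protos/src/main/proto/google/bigtable/v1/bigtable_service.proto',
    ])
    assert exit_code == 0, 'protoc failed'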
71 changes: 71 additions & 0 deletions Makefile.bigtable_v2
@@ -0,0 +1,71 @@
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
GENERATED_DIR=$(shell pwd)/generated_python
GENERATED_SUBDIR=_generated_v2
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/$(GENERATED_SUBDIR)
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb

help:
@echo 'Makefile for gcloud-python Bigtable protos '
@echo ' '
@echo ' make generate Generates the protobuf modules '
@echo ' make check_generate Checks that generate succeeded '
@echo ' make clean Clean generated files '

generate:
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
# Retrieve git repos that have our *.proto files.
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
cd googleapis-pb && git pull origin master
# Make the directory where our *_pb2.py files will go.
mkdir -p $(GENERATED_DIR)
# Generate all *_pb2.py files that require gRPC.
$(PROTOC_CMD) \
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
--grpc_python_out=$(GENERATED_DIR) \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/bigtable.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/bigtable_instance_admin.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/bigtable_table_admin.proto
# Generate all *_pb2.py files that do not require gRPC.
$(PROTOC_CMD) \
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/data.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/common.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/instance.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/table.proto
# Move the newly generated *_pb2.py files into our library.
cp $(GENERATED_DIR)/google/bigtable/v2/* $(BIGTABLE_DIR)
cp $(GENERATED_DIR)/google/bigtable/admin/v2/* $(BIGTABLE_DIR)
# Remove all existing *.proto files before we replace
rm -f $(BIGTABLE_DIR)/*.proto
# Copy over the *.proto files into our library.
cp $(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/*.proto $(BIGTABLE_DIR)
cp $(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/*.proto $(BIGTABLE_DIR)
cp $(GOOGLEAPIS_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
# Rename all *.proto files in our library with an
# underscore and remove executable bit.
cd $(BIGTABLE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
done
# Separate the gRPC parts of the operations service from the
# non-gRPC parts so that the protos from `googleapis-common-protos`
# can be used without gRPC.
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
python scripts/make_operations_grpc.py
# Rewrite the imports in the generated *_pb2.py files.
python scripts/rewrite_imports.py $(BIGTABLE_DIR)/*pb2.py

check_generate:
python scripts/check_generate.py

clean:
rm -fr $(GRPCIO_VIRTUALENV) $(GOOGLEAPIS_PROTOS_DIR) $(GENERATED_DIR)

.PHONY: generate check_generate clean
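
After ``make -f Makefile.bigtable_v2 generate``, a quick sanity check is to import every module protoc should have written into ``gcloud/bigtable/_generated_v2``. The sketch below is a hypothetical smoke test, not the repository's ``scripts/check_generate.py``; the module names are inferred from the ``*.proto`` inputs listed above via protoc's ``<name>_pb2.py`` naming convention.

.. code:: python

    # Hypothetical smoke test (not scripts/check_generate.py): confirm the
    # freshly generated v2 modules import. Names follow protoc's
    # <proto>_pb2.py convention for the inputs in the generate target.
    import importlib

    EXPECTED = (
        'gcloud.bigtable._generated_v2.bigtable_pb2',
        'gcloud.bigtable._generated_v2.bigtable_instance_admin_pb2',
        'gcloud.bigtable._generated_v2.bigtable_table_admin_pb2',
        'gcloud.bigtable._generated_v2.data_pb2',
        'gcloud.bigtable._generated_v2.common_pb2',
        'gcloud.bigtable._generated_v2.instance_pb2',
        'gcloud.bigtable._generated_v2.table_pb2',
    )

    for name in EXPECTED:
        importlib.import_module(name)
        print('imported', name)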
57 changes: 57 additions & 0 deletions Makefile.datastore
@@ -0,0 +1,57 @@
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
GENERATED_DIR=$(shell pwd)/generated_python
DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb

help:
@echo 'Makefile for gcloud-python Datastore protos '
@echo ' '
@echo ' make generate Generates the protobuf modules '
@echo ' make check_generate Checks that generate succeeded '
@echo ' make clean Clean generated files '

generate:
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
# Retrieve git repos that have our *.proto files.
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
cd googleapis-pb && git pull origin master
# Make the directory where our *_pb2.py files will go.
mkdir -p $(GENERATED_DIR)
# Generate all *_pb2.py files that do not require gRPC.
$(PROTOC_CMD) \
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
# Move the newly generated *_pb2.py files into our library.
cp $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
# Remove all existing *.proto files before we replace
rm -f $(DATASTORE_DIR)/*.proto
# Copy over the *.proto files into our library.
cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
# Rename all *.proto files in our library with an
# underscore and remove executable bit.
cd $(DATASTORE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
done
# Separate the gRPC parts of the datastore service from the
# non-gRPC parts so that the protos can be used without gRPC.
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
python scripts/make_datastore_grpc.py
# Rewrite the imports in the generated *_pb2.py files.
python scripts/rewrite_imports.py $(DATASTORE_DIR)/*pb2.py

check_generate:
python scripts/check_generate.py

clean:
rm -fr $(GENERATED_DIR)

.PHONY: generate check_generate clean
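
Once generated, the Datastore modules can be exercised directly, for example by building a v1beta3 ``Key`` message. This is a usage sketch under assumptions: the module name ``gcloud.datastore._generated.entity_pb2`` follows protoc's naming for ``entity.proto``, and the ``Key.path`` / ``kind`` / ``name`` fields match the v1beta3 schema.

.. code:: python

    # Usage sketch under the assumptions stated above: build a Datastore
    # v1beta3 Key from the generated entity_pb2 module.
    from gcloud.datastore._generated import entity_pb2

    key = entity_pb2.Key()
    element = key.path.add()          # repeated Key.PathElement
    element.kind = 'Book'
    element.name = 'gone-with-the-wind'
    print(key)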
8 changes: 4 additions & 4 deletions docs/bigtable-client-intro.rst
@@ -63,7 +63,7 @@ Configuration
Admin API Access
----------------

If you'll be using your client to make `Cluster Admin`_ and `Table Admin`_
If you'll be using your client to make `Instance Admin`_ and `Table Admin`_
API requests, you'll need to pass the ``admin`` argument:

.. code:: python
@@ -89,10 +89,10 @@ Next Step
---------

After a :class:`Client <gcloud.bigtable.client.Client>`, the next highest-level
object is a :class:`Cluster <gcloud.bigtable.cluster.Cluster>`. You'll need
object is an :class:`Instance <gcloud.bigtable.instance.Instance>`. You'll need
one before you can interact with tables or data.

Head next to learn about the :doc:`bigtable-cluster-api`.
Head next to learn about the :doc:`bigtable-instance-api`.

.. _Cluster Admin: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/tree/master/bigtable-protos/src/main/proto/google/bigtable/admin/cluster/v1
.. _Instance Admin: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/tree/master/bigtable-protos/src/main/proto/google/bigtable/admin/instance/v1
.. _Table Admin: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/tree/master/bigtable-protos/src/main/proto/google/bigtable/admin/table/v1
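
Put together, the documented flow is: build a :class:`Client <gcloud.bigtable.client.Client>` with ``admin=True``, then obtain an :class:`Instance <gcloud.bigtable.instance.Instance>` from it before touching tables or data. A minimal sketch, assuming the ``gcloud.bigtable`` API at this commit; the project, instance ID, and location below are placeholders, and the exact ``client.instance()`` signature is an assumption rather than something taken from the diff.

.. code:: python

    # Minimal sketch; 'my-project', 'my-instance', and the location are
    # placeholders, and client.instance() arguments are an assumption, not
    # a verified signature from this commit.
    from gcloud.bigtable.client import Client

    client = Client(project='my-project', admin=True)
    instance = client.instance('my-instance', 'us-central1-c')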