Standardizing docstrings across bigquery samples.
Jon Wayne Parrott committed Sep 17, 2015
1 parent c0da62e commit c036d25
Showing 8 changed files with 273 additions and 46 deletions.
38 changes: 36 additions & 2 deletions bigquery/api/async_query.py
100644 → 100755
@@ -1,3 +1,5 @@
#!/usr/bin/env python

# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -10,7 +12,38 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Command-line application to perform an asynchronous query in BigQuery.
This sample is used on this page:
https://cloud.google.com/bigquery/querying-data#asyncqueries
In order to run it, your environment must be setup with authentication
information [1]. If you're running it in your local development environment and
you have the Google Cloud SDK [2] installed, you can do this easily by running:
$ gcloud auth login
[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/
For more information on BigQuery you can visit:
https://developers.google.com/bigquery
For more information on the BigQuery API Python library surface you
can visit:
https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/
For information on the Python Client Library visit:
https://developers.google.com/api-client-library/python
"""

import argparse
import json
import time
@@ -103,7 +136,8 @@ def main(project_id, query_string, batch, num_retries, interval):
# [START main]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Loads data into BigQuery.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('query', help='BigQuery SQL Query.')
parser.add_argument(
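The parser now reuses the module docstring instead of a one-off description string, with RawDescriptionHelpFormatter so the docstring's formatting is preserved in --help output. A minimal sketch of the standardized pattern (the docstring text and project_id argument here are illustrative, not copied from the sample):

    #!/usr/bin/env python

    """One-line summary shown at the top of --help.

    Longer usage notes, links, and examples follow here and are printed
    verbatim because of RawDescriptionHelpFormatter.
    """

    import argparse


    def main(project_id):
        # A real sample would call the BigQuery API here.
        print('Running against project %s' % project_id)


    if __name__ == '__main__':
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)
        parser.add_argument('project_id', help='Your Google Cloud project ID.')
        args = parser.parse_args()
        main(args.project_id)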
41 changes: 38 additions & 3 deletions bigquery/api/export_data_to_cloud_storage.py
100644 → 100755
@@ -1,4 +1,6 @@
#!/usr/bin/env python

# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
Expand All @@ -10,7 +12,39 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Command-line application to export a table from BigQuery to Google Cloud
Storage.
This sample is used on this page:
https://cloud.google.com/bigquery/exporting-data-from-bigquery
In order to run it, your environment must be setup with authentication
information [1]. If you're running it in your local development environment and
you have the Google Cloud SDK [2] installed, you can do this easily by running:
$ gcloud auth login
[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/
For more information on BigQuery you can visit:
https://developers.google.com/bigquery
For more information on the BigQuery API Python library surface you
can visit:
https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/
For information on the Python Client Library visit:
https://developers.google.com/api-client-library/python
"""

import argparse
import time
import uuid
@@ -113,7 +147,8 @@ def main(cloud_storage_path, project_id, dataset_id, table_id,
# [START main]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Exports data from BigQuery to Google Cloud Storage.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('dataset_id', help='BigQuery dataset to export.')
parser.add_argument('table_id', help='BigQuery table to export.')
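Only the docstring and parser change here; for context, an export like this one is normally submitted as an extract job through the BigQuery v2 API. A rough sketch under assumed names (my-project, my_dataset, my_table, and gs://my-bucket are placeholders, and this is not the sample's own code):

    import uuid

    from googleapiclient import discovery
    from oauth2client.client import GoogleCredentials

    credentials = GoogleCredentials.get_application_default()
    bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

    job_data = {
        'jobReference': {
            'projectId': 'my-project',
            'jobId': str(uuid.uuid4()),
        },
        'configuration': {
            'extract': {
                'sourceTable': {
                    'projectId': 'my-project',
                    'datasetId': 'my_dataset',
                    'tableId': 'my_table',
                },
                # A wildcard lets BigQuery shard large exports across files.
                'destinationUris': ['gs://my-bucket/export-*.csv'],
            },
        },
    }
    job = bigquery.jobs().insert(projectId='my-project', body=job_data).execute()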
38 changes: 33 additions & 5 deletions bigquery/api/getting_started.py
100644 → 100755
@@ -13,11 +13,38 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""Sample for making BigQuery queries using the python sdk.
"""Command-line application that demonstrates basic BigQuery API usage.
This is a command-line script that queries a public shakespeare dataset, and
displays the 10 of Shakespeare's works with the greatest number of distinct
words.
This sample queries a public Shakespeare dataset and displays the 10 of
Shakespeare's works with the greatest number of distinct words.

This sample is used on this page:

https://cloud.google.com/bigquery/bigquery-api-quickstart

In order to run it, your environment must be set up with authentication
information [1]. If you're running it in your local development environment and
you have the Google Cloud SDK [2] installed, you can do this easily by running:

$ gcloud auth login

[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/

For more information on BigQuery you can visit:

https://developers.google.com/bigquery

For more information on the BigQuery API Python library surface you
can visit:

https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/

For information on the Python Client Library visit:

https://developers.google.com/api-client-library/python
"""
# [START all]
import argparse
@@ -63,7 +90,8 @@ def main(project_id):

if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Queries the public BigQuery Shakespeare dataset.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud Project ID.')

args = parser.parse_args()
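For reference, the quickstart-style query this sample wraps can be issued synchronously with jobs().query(); a condensed sketch assuming the public publicdata:samples.shakespeare table and application-default credentials (not the sample's exact code):

    from googleapiclient import discovery
    from oauth2client.client import GoogleCredentials

    credentials = GoogleCredentials.get_application_default()
    bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

    query_request = {
        # Each row of the shakespeare sample table is one distinct word of one
        # work, so counting rows per corpus counts distinct words.
        'query': ('SELECT corpus, COUNT(*) AS unique_words '
                  'FROM [publicdata:samples.shakespeare] '
                  'GROUP BY corpus ORDER BY unique_words DESC LIMIT 10'),
        'timeoutMs': 10000,
    }
    response = bigquery.jobs().query(
        projectId='my-project', body=query_request).execute()

    # Each result row is a dict with an 'f' list of cell values.
    for row in response.get('rows', []):
        print('{}: {}'.format(row['f'][0]['v'], row['f'][1]['v']))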
48 changes: 21 additions & 27 deletions bigquery/api/list_datasets_projects.py
100644 → 100755
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
#!/usr/bin/env python

# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,10 +12,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Command-line skeleton application for BigQuery API.

This is the sample for this page:
"""Command-line application to list all projects and datasets in BigQuery.
This sample is used on this page:
https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects
@@ -24,31 +25,23 @@
$ gcloud auth login
Usage:
$ python list_datasets_projects.py <project-id>
where <project-id> is the id of the developers console [3] project you'd like
to list the bigquery datasets and projects for.
[1] https://developers.google.com/identity/protocols/\
application-default-credentials#howtheywork
[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/
[3] https://console.developers.google.com
For more information on the BigQuery API you can visit:
For more information on BigQuery you can visit:
https://developers.google.com/bigquery/docs/overview
https://developers.google.com/bigquery
For more information on the BigQuery API Python library surface you
can visit:
https://developers.google.com/resources/api-libraries/documentation/
bigquery/v2/python/latest/
https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/
For information on the Python Client Library visit:
https://developers.google.com/api-client-library/python/start/get_started
https://developers.google.com/api-client-library/python
"""

import argparse
@@ -60,9 +53,9 @@


# [START list_datasets]
def list_datasets(service, project):
def list_datasets(bigquery, project):
try:
datasets = service.datasets()
datasets = bigquery.datasets()
list_reply = datasets.list(projectId=project).execute()
print('Dataset list:')
pprint(list_reply)
@@ -74,10 +67,10 @@ def list_datasets(service, project):


# [START list_projects]
def list_projects(service):
def list_projects(bigquery):
try:
# List the projects available to the current credentials.
projects = service.projects()
projects = bigquery.projects()
list_reply = projects.list().execute()

print('Project list:')
@@ -92,14 +85,15 @@ def list_projects(service):
def main(project_id):
credentials = GoogleCredentials.get_application_default()
# Construct the service object for interacting with the BigQuery API.
service = discovery.build('bigquery', 'v2', credentials=credentials)
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

list_datasets(service, project_id)
list_projects(service)
list_datasets(bigquery, project_id)
list_projects(bigquery)

if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Lists BigQuery datasets and projects.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='the project id to list.')

args = parser.parse_args()
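list_datasets() above fetches a single page of results. If a project held more datasets than one response returns, the client library's list_next() helper can walk the remaining pages; a sketch of that pattern (pagination is an addition here, not part of the committed sample):

    def list_all_datasets(bigquery, project):
        """Yield every dataset in the project, following pagination."""
        request = bigquery.datasets().list(projectId=project)
        while request is not None:
            response = request.execute()
            for dataset in response.get('datasets', []):
                yield dataset
            # list_next() returns None once the final page has been fetched.
            request = bigquery.datasets().list_next(request, response)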
38 changes: 36 additions & 2 deletions bigquery/api/load_data_by_post.py
100644 → 100755
@@ -1,3 +1,5 @@
#!/usr/bin/env python

# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -10,7 +12,38 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Command-line application that loads data into BigQuery via HTTP POST.
This sample is used on this page:
https://cloud.google.com/bigquery/loading-data-post-request
In order to run it, your environment must be setup with authentication
information [1]. If you're running it in your local development environment and
you have the Google Cloud SDK [2] installed, you can do this easily by running:
$ gcloud auth login
[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/
For more information on BigQuery you can visit:
https://developers.google.com/bigquery
For more information on the BigQuery API Python library surface you
can visit:
https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/
For information on the Python Client Library visit:
https://developers.google.com/api-client-library/python
"""

import argparse
import json
import time
@@ -124,7 +157,8 @@ def main(project_id, dataset_id, table_name, schema_path, data_path):

if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Loads data into BigQuery.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('dataset_id', help='A BigQuery dataset ID.')
parser.add_argument(
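This sample loads data by POSTing it in the request body; the client library can express the same kind of load as a media upload through jobs().insert(). A rough alternative sketch with placeholder names (schema.json, data.csv, my-project and friends are illustrative, and this is not how the sample itself does it):

    import json

    from googleapiclient import discovery
    from googleapiclient.http import MediaFileUpload
    from oauth2client.client import GoogleCredentials

    credentials = GoogleCredentials.get_application_default()
    bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

    # Assumes schema.json holds a JSON list of {'name': ..., 'type': ...} fields.
    with open('schema.json') as schema_file:
        schema = json.load(schema_file)

    job_body = {
        'configuration': {
            'load': {
                'schema': {'fields': schema},
                'destinationTable': {
                    'projectId': 'my-project',
                    'datasetId': 'my_dataset',
                    'tableId': 'my_table',
                },
            },
        },
    }
    upload = MediaFileUpload('data.csv', mimetype='application/octet-stream')
    job = bigquery.jobs().insert(
        projectId='my-project', body=job_body, media_body=upload).execute()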
40 changes: 37 additions & 3 deletions bigquery/api/load_data_from_csv.py
100644 → 100755
@@ -1,3 +1,5 @@
#!/usr/bin/env python

# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -10,7 +12,39 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Command-line application that loads data into BigQuery from a CSV file in
Google Cloud Storage.
This sample is used on this page:
https://cloud.google.com/bigquery/loading-data-into-bigquery#loaddatagcs
In order to run it, your environment must be setup with authentication
information [1]. If you're running it in your local development environment and
you have the Google Cloud SDK [2] installed, you can do this easily by running:
$ gcloud auth login
[1] https://developers.google.com/identity/protocols/application-default-\
credentials#howtheywork
[2] https://cloud.google.com/sdk/
For more information on BigQuery you can visit:
https://developers.google.com/bigquery
For more information on the BigQuery API Python library surface you
can visit:
https://developers.google.com/resources/api-libraries/documentation/bigquery\
/v2/python/latest/
For information on the Python Client Library visit:
https://developers.google.com/api-client-library/python
"""

import argparse
import json
import time
@@ -119,8 +153,8 @@ def main(project_id, dataset_id, table_name, schema_file, data_path,
# [START main]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Loads data into BigQuery from a CSV file in Google '
'Cloud Storage.')
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('dataset_id', help='A BigQuery dataset ID.')
parser.add_argument(
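For context, loading a CSV that already lives in Cloud Storage comes down to a load-job configuration along these lines (a sketch with placeholder bucket, schema, and table names; not the sample's exact code):

    from googleapiclient import discovery
    from oauth2client.client import GoogleCredentials

    credentials = GoogleCredentials.get_application_default()
    bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

    job_body = {
        'configuration': {
            'load': {
                'sourceUris': ['gs://my-bucket/data.csv'],
                'sourceFormat': 'CSV',
                'skipLeadingRows': 1,
                'schema': {'fields': [
                    {'name': 'name', 'type': 'STRING'},
                    {'name': 'value', 'type': 'INTEGER'},
                ]},
                'destinationTable': {
                    'projectId': 'my-project',
                    'datasetId': 'my_dataset',
                    'tableId': 'my_table',
                },
            },
        },
    }
    job = bigquery.jobs().insert(projectId='my-project', body=job_body).execute()
    # Poll bigquery.jobs().get(projectId=..., jobId=...) until the job is 'DONE'.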