Merge pull request #21 from sbneto/gcp
Gcp
sbneto authored May 7, 2019
2 parents 00b682a + bd2a44e commit 3029f90
Showing 5 changed files with 100 additions and 15 deletions.
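
In summary: this merge adds Google Cloud Storage support. A new storages.get_storage(path) helper picks the storage class from the path's scheme prefix ('s3', 'gs', or 'file', with anything else falling back to local), a GCStorage class implements read, write, and list against GCS, and the CLI commands resolve their backend from the configured path instead of hard-coding S3. A minimal sketch of the dispatch, using hypothetical paths:

    from s3conf import storages

    storages.get_storage('s3://my-bucket/myfile.env')    # -> S3Storage
    storages.get_storage('gs://my-bucket/myfile.env')    # -> GCStorage
    storages.get_storage('/etc/app/myfile.env')          # -> LocalStorage (no known prefix)
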
requirements.txt (1 addition, 0 deletions)

@@ -6,3 +6,4 @@ pytest~=3.6.3
 pytest-pythonpath~=0.7.2
 pytest-cov~=2.5.1
 configobj~=5.0.6
+google-cloud-storage==1.15.0
s3conf/client.py (18 additions, 14 deletions)

@@ -14,12 +14,6 @@
 logger = logging.getLogger(__name__)
 
 
-STORAGES = {
-    's3': storages.S3Storage,
-    'local': storages.LocalStorage,
-}
-
-
 class SectionArgument(click.Argument):
     def handle_parse_result(self, *args, **kwargs):
         try:
@@ -53,7 +47,8 @@ def main(ctx, edit, create):
     if ctx.invoked_subcommand is None:
         settings = config.Settings()
         logger.debug('Using config file %s', settings.config_file)
-        STORAGES['local'](settings=settings).open(settings.config_file).edit(create=create)
+        StorageCls = storages.get_storage(settings.config_file)
+        StorageCls(settings=settings).open(settings.config_file).edit(create=create)
         return
     else:
         raise UsageError('Edit should not be called with a subcommand.')
@@ -99,7 +94,8 @@ def env(section, map_files, phusion, phusion_path, quiet, edit, create):
     try:
         logger.debug('Running env command')
         settings = config.Settings(section=section)
-        storage = STORAGES['s3'](settings=settings)
+        StorageCls = storages.get_storage(settings.environment_file_path)
+        storage = StorageCls(settings=settings)
         conf = s3conf.S3Conf(storage=storage, settings=settings)
 
         if edit:
@@ -171,7 +167,8 @@ def push(section, force):
     """
     try:
         settings = config.Settings(section=section)
-        storage = STORAGES['s3'](settings=settings)
+        StorageCls = storages.get_storage(settings.environment_file_path)
+        storage = StorageCls(settings=settings)
         conf = s3conf.S3Conf(storage=storage, settings=settings)
         conf.push(force=force)
     except exceptions.EnvfilePathNotDefinedError:
@@ -213,7 +210,8 @@ def exec_command(ctx, section, command, map_files):
         return
 
     settings = config.Settings(section=section)
-    storage = STORAGES['s3'](settings=settings)
+    StorageCls = storages.get_storage(settings.environment_file_path)
+    storage = StorageCls(settings=settings)
     conf = s3conf.S3Conf(storage=storage, settings=settings)
     with conf.get_envfile() as env_file:
         env_vars = env_file.as_dict()
@@ -242,7 +240,8 @@ def download(remote_path, local_path):
     If REMOTE_PATH does not have a trailing slash, it is considered to be a file, and LOCAL_PATH should be a file as
     well.
     """
-    storage = STORAGES['s3']()
+    StorageCls = storages.get_storage(remote_path)
+    storage = StorageCls()
     conf = s3conf.S3Conf(storage=storage)
     conf.download(remote_path, local_path)
 
@@ -258,7 +257,8 @@ def upload(remote_path, local_path):
     If LOCAL_PATH is a file, the REMOTE_PATH file is created with the same contents.
     """
-    storage = STORAGES['s3']()
+    StorageCls = storages.get_storage(remote_path)
+    storage = StorageCls()
     conf = s3conf.S3Conf(storage=storage)
     conf.upload(local_path, remote_path)
 
@@ -285,7 +285,9 @@ def set_variable(section, value, create):
     try:
         logger.debug('Running env command')
         settings = config.Settings(section=section)
-        conf = s3conf.S3Conf(settings=settings)
+        StorageCls = storages.get_storage(settings.environment_file_path)
+        storage = StorageCls(settings=settings)
+        conf = s3conf.S3Conf(storage=storage, settings=settings)
 
         with conf.get_envfile() as env_vars:
             env_vars.set(value, create=create)
@@ -310,7 +312,9 @@ def unset_variable(section, value):
     try:
         logger.debug('Running env command')
         settings = config.Settings(section=section)
-        conf = s3conf.S3Conf(settings=settings)
+        StorageCls = storages.get_storage(settings.environment_file_path)
+        storage = StorageCls(settings=settings)
+        conf = s3conf.S3Conf(storage=storage, settings=settings)
 
         with conf.get_envfile() as env_vars:
             env_vars.unset(value)
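
All of the commands above now share the same three-line idiom. A hedged sketch of that flow, assuming a configured section (the name 'myapp' is hypothetical) whose environment_file_path points at a gs:// or s3:// URL:

    from s3conf import config, s3conf, storages

    settings = config.Settings(section='myapp')  # hypothetical section name
    StorageCls = storages.get_storage(settings.environment_file_path)
    conf = s3conf.S3Conf(storage=StorageCls(settings=settings), settings=settings)
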
s3conf/patch.py (4 additions, 0 deletions)

@@ -7,6 +7,10 @@
 from editor import get_editor, get_editor_args
 from click_log import core
 
+import warnings
+
+# https://github.com/googleapis/google-auth-library-python/issues/271
+warnings.filterwarnings("ignore", "Your application has authenticated using end user credentials")
 
 # apply patches that allow editor with args
 # https://github.com/fmoo/python-editor/pull/15
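
warnings.filterwarnings matches its second argument as a regular expression against the beginning of the warning message, so this filter silences only the google-auth end-user-credentials warning. A quick stdlib-only illustration:

    import warnings

    warnings.filterwarnings("ignore", "Your application has authenticated using end user credentials")

    warnings.warn("Your application has authenticated using end user credentials.")  # suppressed
    warnings.warn("Some other warning")  # still emitted
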
s3conf/s3conf.py (2 additions, 1 deletion)

@@ -46,7 +46,8 @@ def raise_out_of_sync(local_file, remote_file):
 class S3Conf:
     def __init__(self, storage=None, settings=None):
         self.settings = settings or config.Settings()
-        self.storage = storage or storages.S3Storage(settings=self.settings)
+        StorageCls = storages.get_storage(self.settings.environment_file_path)
+        self.storage = storage or StorageCls(settings=self.settings)
 
     def push(self, force=False):
         if not force:
s3conf/storages.py (75 additions, 0 deletions)

@@ -6,6 +6,7 @@
 
 import boto3
 from botocore.exceptions import ClientError
+from google.cloud import storage
 
 from .utils import md5s3
 from .files import File, copyfileobj
@@ -24,6 +25,26 @@ def strip_s3_path(path):
     return bucket, path
 
 
+def strip_gs_path(path):
+    bucket, _, path = strip_prefix(path, 'gs://').partition('/')
+    return bucket, path
+
+
+def get_storage(path):
+    logger.debug('Getting storage for %s', path)
+    prefix, _, _ = str(path).partition(':')
+    try:
+        storage = {
+            's3': S3Storage,
+            'gs': GCStorage,
+            'file': LocalStorage,
+        }[prefix]
+    except KeyError:
+        storage = LocalStorage
+    logger.debug('Using %s storage', storage)
+    return storage
+
+
 class BaseStorage:
     def __init__(self, settings):
         self.settings = settings
@@ -117,6 +138,60 @@ def list(self, path):
             raise
 
 
+class GCStorage(BaseStorage):
+    def __init__(self, settings=None):
+        super().__init__(settings=settings)
+        self._resource = None
+
+    @property
+    def gcs(self):
+        logger.debug('Getting GCS resource')
+        if not self._resource:
+            logger.debug('Resource does not exist, creating a new one...')
+            credential_file = self.settings.get('S3CONF_APPLICATION_CREDENTIALS') \
+                or self.settings.get('GOOGLE_APPLICATION_CREDENTIALS')
+            if credential_file:
+                self._resource = storage.Client.from_service_account_json(credential_file)
+            else:
+                self._resource = storage.Client()
+        return self._resource
+
+    def read_into_stream(self, file_path, stream=None):
+        stream = stream or BytesIO()
+        bucket_name, path = strip_gs_path(file_path)
+        bucket = self.gcs.get_bucket(bucket_name)
+        blob = bucket.blob(path)
+        blob.download_to_file(stream)
+        stream.seek(0)
+        return stream
+
+    def _write(self, f, file_name):
+        bucket_name, path = strip_gs_path(file_name)
+        bucket = self.gcs.get_bucket(bucket_name)
+        blob = bucket.blob(path)
+        f.seek(0)
+        blob.upload_from_file(f)
+
+    def write(self, f, file_name):
+        logger.debug('Writing to %s', file_name)
+        try:
+            self._write(f, file_name)
+        except Exception:
+            bucket, _ = strip_gs_path(file_name)
+            self.gcs.create_bucket(bucket)
+            self._write(f, file_name)
+
+    def list(self, path):
+        logger.debug('Listing %s', path)
+        bucket_name, path = strip_gs_path(path)
+        bucket = self.gcs.get_bucket(bucket_name)
+        path = path.rstrip('/')
+        for obj in bucket.list_blobs(prefix=path):
+            relative_path = strip_prefix(obj.name, path)
+            if relative_path.startswith('/') or not relative_path:
+                yield obj.etag, relative_path.lstrip('/')
+
+
 class LocalStorage(BaseStorage):
     def read_into_stream(self, file_name, stream=None):
         try:
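
For reference, a direct round trip through the new GCStorage might look like the sketch below. It assumes valid Google credentials are available and that config.Settings() resolves them; the bucket name is hypothetical:

    from io import BytesIO
    from s3conf import config, storages

    store = storages.GCStorage(settings=config.Settings())
    store.write(BytesIO(b'SECRET=1\n'), 'gs://my-bucket/myfile.env')
    print(store.read_into_stream('gs://my-bucket/myfile.env').read())
    for etag, name in store.list('gs://my-bucket/myfile.env'):
        print(etag, name)
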
