Merge pull request #991 from fishtown-analytics/fix/drop-table-quoting
Fix table/schema quoting on drop, truncate, and rename (#983)
beckjake authored Sep 13, 2018
2 parents f473eae + 7cbec9e commit 16fa082
Showing 5 changed files with 202 additions and 12 deletions.
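This change stops relying on an adapter-level DEFAULT_QUOTE flag and instead threads the project's quoting config into Relation.create as a quote_policy for drop, truncate, and rename, with the fallback default now read from the Relation class. The sketch below uses illustrative names only, not dbt's actual classes; it shows the intended behavior, where project-level quoting settings override per-relation defaults when the relation is rendered into SQL.

# Minimal, self-contained sketch of the quoting behavior this commit moves
# toward; the class below is illustrative, not dbt's actual Relation API.


class SketchRelation:
    # Per-relation fallback used when the project config does not set a key,
    # mirroring the role of Relation.DEFAULTS['quote_policy'] in the diff.
    DEFAULT_QUOTE_POLICY = {'schema': True, 'identifier': True}

    def __init__(self, schema, identifier, quote_policy=None):
        self.schema = schema
        self.identifier = identifier
        # Project-level 'quoting' overrides win over the relation defaults.
        self.quote_policy = dict(self.DEFAULT_QUOTE_POLICY, **(quote_policy or {}))

    def _part(self, name, key):
        return '"{}"'.format(name) if self.quote_policy.get(key) else name

    def render(self):
        return '{}.{}'.format(self._part(self.schema, 'schema'),
                              self._part(self.identifier, 'identifier'))


# With the quoting config used in the new unit tests
# ({'identifier': False, 'schema': True}), a drop would quote the schema
# but not the table name:
relation = SketchRelation('test_schema', 'test_table',
                          quote_policy={'identifier': False, 'schema': True})
print('drop table if exists {} cascade'.format(relation.render()))
# -> drop table if exists "test_schema".test_table cascade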
27 changes: 18 additions & 9 deletions dbt/adapters/default/impl.py
@@ -50,8 +50,6 @@ def test(row):


class DefaultAdapter(object):
-    DEFAULT_QUOTE = True
-
    requires = {}

    context_functions = [
@@ -179,7 +177,8 @@ def drop(cls, profile, project_cfg, schema,
        relation = cls.Relation.create(
            schema=schema,
            identifier=identifier,
-            type=relation_type)
+            type=relation_type,
+            quote_policy=project_cfg.get('quoting', {}))

        return cls.drop_relation(profile, project_cfg, relation, model_name)

@@ -200,7 +199,8 @@ def truncate(cls, profile, project_cfg, schema, table, model_name=None):
        relation = cls.Relation.create(
            schema=schema,
            identifier=table,
-            type='table')
+            type='table',
+            quote_policy=project_cfg.get('quoting', {}))

        return cls.truncate_relation(profile, project_cfg,
                                     relation, model_name)
@@ -215,12 +215,20 @@ def truncate_relation(cls, profile, project_cfg,
    @classmethod
    def rename(cls, profile, project_cfg, schema,
               from_name, to_name, model_name=None):
+        quote_policy = project_cfg.get('quoting', {})
+        from_relation = cls.Relation.create(
+            schema=schema,
+            identifier=from_name,
+            quote_policy=quote_policy
+        )
+        to_relation = cls.Relation.create(
+            identifier=to_name,
+            quote_policy=quote_policy
+        )
        return cls.rename_relation(
            profile, project_cfg,
-            from_relation=cls.Relation.create(
-                schema=schema, identifier=from_name),
-            to_relation=cls.Relation.create(
-                identifier=to_name),
+            from_relation=from_relation,
+            to_relation=to_relation,
            model_name=model_name)

    @classmethod
@@ -754,7 +762,8 @@ def _quote_as_configured(cls, project_cfg, identifier, quote_key):
        """This is the actual implementation of quote_as_configured, without
        the extra arguments needed for use inside materialization code.
        """
-        if project_cfg.get('quoting', {}).get(quote_key, cls.DEFAULT_QUOTE):
+        default = cls.Relation.DEFAULTS['quote_policy'].get(quote_key)
+        if project_cfg.get('quoting', {}).get(quote_key, default):
            return cls.quote(identifier)
        else:
            return identifier
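With DEFAULT_QUOTE gone, _quote_as_configured falls back to the per-key default carried by the adapter's Relation class. A hedged sketch of that lookup follows, written as a free function with illustrative names; only the quoting config keys come from the diff.

# Hedged sketch of the new fallback: the adapter asks the Relation class for
# a per-key default instead of a single DEFAULT_QUOTE boolean. Only the
# 'quoting' config keys come from the diff; the rest is illustrative.

def quote_as_configured(identifier, quote_key, project_cfg, relation_defaults):
    """Quote `identifier` if the project 'quoting' config (or, failing that,
    the relation's default policy) says this key should be quoted."""
    default = relation_defaults.get(quote_key)
    if project_cfg.get('quoting', {}).get(quote_key, default):
        return '"{}"'.format(identifier)
    return identifier


# A Snowflake-style relation default (unquoted) overridden for schemas by the
# project config, matching the expected SQL in the new unit tests:
defaults = {'schema': False, 'identifier': False}
cfg = {'quoting': {'identifier': False, 'schema': True}}
assert quote_as_configured('test_schema', 'schema', cfg, defaults) == '"test_schema"'
assert quote_as_configured('test_table', 'identifier', cfg, defaults) == 'test_table'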
6 changes: 5 additions & 1 deletion dbt/adapters/postgres/impl.py
@@ -125,7 +125,11 @@ def alter_column_type(cls, profile, project, schema, table, column_name,
        4. Rename the new column to existing column
        """

-        relation = cls.Relation.create(schema=schema, identifier=table)
+        relation = cls.Relation.create(
+            schema=schema,
+            identifier=table,
+            quote_policy=project.get('quoting', {})
+        )

        opts = {
            "relation": relation,
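The Postgres alter_column_type path creates a temporary column, copies the data, drops the old column, and renames the new one; the rendered relation name is interpolated into each statement, so it has to honor the same quote policy as drop, truncate, and rename. A simplified sketch of those four statements follows; the SQL templates and the temporary column name are illustrative, not dbt's actual macro.

# Simplified sketch of the four-step column type change described in the
# docstring above; the SQL templates are illustrative, not dbt's macro.

def alter_column_type_sql(relation_name, column_name, new_type,
                          tmp_column='altered_column_tmp'):
    # relation_name is already rendered with the project quote policy,
    # e.g. '"test_schema".test_table' for the config used in the tests.
    return [
        'alter table {} add column {} {}'.format(relation_name, tmp_column, new_type),
        'update {} set {} = {}'.format(relation_name, tmp_column, column_name),
        'alter table {} drop column {} cascade'.format(relation_name, column_name),
        'alter table {} rename column {} to {}'.format(relation_name, tmp_column, column_name),
    ]


for statement in alter_column_type_sql('"test_schema".test_table', 'price', 'numeric(20, 2)'):
    print(statement)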
2 changes: 0 additions & 2 deletions dbt/adapters/snowflake/impl.py
@@ -18,8 +18,6 @@


class SnowflakeAdapter(PostgresAdapter):
-    DEFAULT_QUOTE = False
-
    Relation = SnowflakeRelation

    @classmethod
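Removing DEFAULT_QUOTE = False here means Snowflake's unquoted-by-default behavior has to come from the Relation class rather than the adapter. A sketch of how that split might look; the DEFAULTS shapes below are assumptions for illustration, not the actual DefaultRelation or SnowflakeRelation definitions.

# Sketch only: per-adapter quoting defaults expressed on the Relation class
# rather than as an adapter-level DEFAULT_QUOTE flag. The DEFAULTS shapes are
# assumptions for illustration, not the actual relation definitions.


class DefaultRelationSketch:
    DEFAULTS = {'quote_policy': {'schema': True, 'identifier': True}}


class SnowflakeRelationSketch(DefaultRelationSketch):
    # Snowflake folds unquoted identifiers to upper case, so its relations
    # default to not quoting.
    DEFAULTS = {'quote_policy': {'schema': False, 'identifier': False}}


def fallback(relation_cls, quote_key):
    # Mirrors the lookup _quote_as_configured now performs.
    return relation_cls.DEFAULTS['quote_policy'].get(quote_key)


assert fallback(DefaultRelationSketch, 'identifier') is True
assert fallback(SnowflakeRelationSketch, 'identifier') is False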
85 changes: 85 additions & 0 deletions test/unit/test_postgres_adapter.py
@@ -7,6 +7,7 @@
from dbt.adapters.postgres import PostgresAdapter
from dbt.exceptions import ValidationException
from dbt.logger import GLOBAL_LOGGER as logger  # noqa
+from psycopg2 import extensions as psycopg2_extensions
import agate


@@ -108,3 +109,87 @@ def test_get_catalog_various_schemas(self, mock_run):
set(map(tuple, catalog)),
{('foo', 'bar'), ('FOO', 'baz'), ('quux', 'bar')}
)

class TestConnectingPostgresAdapter(unittest.TestCase):
def setUp(self):
flags.STRICT_MODE = False

self.profile = {
'dbname': 'postgres',
'user': 'root',
'host': 'database',
'pass': 'password',
'port': 5432,
'schema': 'public'
}

self.project = {
'name': 'X',
'version': '0.1',
'profile': 'test',
'project-root': '/tmp/dbt/does-not-exist',
'quoting': {
'identifier': False,
'schema': True,
}
}

self.handle = mock.MagicMock(spec=psycopg2_extensions.connection)
self.cursor = self.handle.cursor.return_value
self.mock_execute = self.cursor.execute
self.patcher = mock.patch('dbt.adapters.postgres.impl.psycopg2')
self.psycopg2 = self.patcher.start()

self.psycopg2.connect.return_value = self.handle
conn = PostgresAdapter.get_connection(self.profile)

def tearDown(self):
# we want a unique self.handle every time.
PostgresAdapter.cleanup_connections()
self.patcher.stop()

def test_quoting_on_drop_schema(self):
PostgresAdapter.drop_schema(
profile=self.profile,
project_cfg=self.project,
schema='test_schema'
)

self.mock_execute.assert_has_calls([
mock.call('drop schema if exists "test_schema" cascade', None)
])

def test_quoting_on_drop(self):
PostgresAdapter.drop(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
relation='test_table',
relation_type='table'
)
self.mock_execute.assert_has_calls([
mock.call('drop table if exists "test_schema".test_table cascade', None)
])

def test_quoting_on_truncate(self):
PostgresAdapter.truncate(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
table='test_table'
)
self.mock_execute.assert_has_calls([
mock.call('truncate table "test_schema".test_table', None)
])

def test_quoting_on_rename(self):
PostgresAdapter.rename(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
from_name='table_a',
to_name='table_b'
)
self.mock_execute.assert_has_calls([
mock.call('alter table "test_schema".table_a rename to table_b', None)
])
94 changes: 94 additions & 0 deletions test/unit/test_snowflake_adapter.py
@@ -0,0 +1,94 @@
import mock
import unittest

import dbt.flags as flags

import dbt.adapters
from dbt.adapters.snowflake import SnowflakeAdapter
from dbt.exceptions import ValidationException
from dbt.logger import GLOBAL_LOGGER as logger # noqa
from snowflake import connector as snowflake_connector

class TestSnowflakeAdapter(unittest.TestCase):
def setUp(self):
flags.STRICT_MODE = False

self.profile = {
'dbname': 'postgres',
'user': 'root',
'host': 'database',
'pass': 'password',
'port': 5432,
'schema': 'public'
}

self.project = {
'name': 'X',
'version': '0.1',
'profile': 'test',
'project-root': '/tmp/dbt/does-not-exist',
'quoting': {
'identifier': False,
'schema': True,
}
}

self.handle = mock.MagicMock(spec=snowflake_connector.SnowflakeConnection)
self.cursor = self.handle.cursor.return_value
self.mock_execute = self.cursor.execute
self.patcher = mock.patch('dbt.adapters.snowflake.impl.snowflake.connector.connect')
self.snowflake = self.patcher.start()

self.snowflake.return_value = self.handle
conn = SnowflakeAdapter.get_connection(self.profile)

def tearDown(self):
# we want a unique self.handle every time.
SnowflakeAdapter.cleanup_connections()
self.patcher.stop()

def test_quoting_on_drop_schema(self):
SnowflakeAdapter.drop_schema(
profile=self.profile,
project_cfg=self.project,
schema='test_schema'
)

self.mock_execute.assert_has_calls([
mock.call('drop schema if exists "test_schema" cascade', None)
])

def test_quoting_on_drop(self):
SnowflakeAdapter.drop(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
relation='test_table',
relation_type='table'
)
self.mock_execute.assert_has_calls([
mock.call('drop table if exists "test_schema".test_table cascade', None)
])

def test_quoting_on_truncate(self):
SnowflakeAdapter.truncate(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
table='test_table'
)
self.mock_execute.assert_has_calls([
mock.call('truncate table "test_schema".test_table', None)
])

def test_quoting_on_rename(self):
SnowflakeAdapter.rename(
profile=self.profile,
project_cfg=self.project,
schema='test_schema',
from_name='table_a',
to_name='table_b'
)
self.mock_execute.assert_has_calls([
mock.call('alter table "test_schema".table_a rename to table_b', None)
])
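Both new test modules follow the same pattern: mock the DB-API connection handle, call the adapter classmethod with a project config whose quoting block sets identifier: False and schema: True, and assert on the exact SQL handed to cursor.execute(). A minimal sketch of that pattern with plain unittest.mock, independent of the adapter classes; the helper name is illustrative.

# Minimal sketch of the mock-and-assert pattern used by the new tests;
# execute_drop is a stand-in for the adapter call, not dbt's API.
import unittest.mock as mock


def execute_drop(cursor, schema, table, quoting):
    # Quote each part only if the project 'quoting' config asks for it.
    sch = '"{}"'.format(schema) if quoting.get('schema', True) else schema
    tbl = '"{}"'.format(table) if quoting.get('identifier', True) else table
    cursor.execute('drop table if exists {}.{} cascade'.format(sch, tbl), None)


cursor = mock.MagicMock()
execute_drop(cursor, 'test_schema', 'test_table',
             {'identifier': False, 'schema': True})
cursor.execute.assert_called_once_with(
    'drop table if exists "test_schema".test_table cascade', None)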
