Merge pull request #1646 from fishtown-analytics/feature/know-about-files

Feature: Partial Parsing
beckjake authored Aug 21, 2019
2 parents 403d000 + fe2a9fe commit 01534c1
Showing 104 changed files with 4,141 additions and 4,179 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.14.0
+current_version = 0.15.0a1
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
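A minimal sketch of how the named-group parse pattern above decomposes a version string. Only the visible portion of the pattern is used here; the full .bumpversion.cfg pattern continues below the fold and presumably also captures the new "a1" prerelease suffix:

    import re

    # The visible portion of the bumpversion parse pattern.
    pattern = re.compile(r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)')
    match = pattern.match('0.15.0a1')
    assert match is not None
    print(match.group('major'), match.group('minor'), match.group('patch'))
    # -> 0 15 0  (the trailing "a1" is beyond this partial pattern)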
2 changes: 1 addition & 1 deletion .circleci/config.yml

@@ -19,7 +19,7 @@ jobs:
       PGUSER: root
       PGPASSWORD: password
       PGDATABASE: postgres
-    - run: tox -e flake8,unit-py36
+    - run: tox -e flake8,mypy,unit-py36
   integration-postgres-py36:
     docker: *test_and_postgres
     steps:
4 changes: 4 additions & 0 deletions .gitignore

@@ -23,6 +23,7 @@ var/
 *.egg-info/
 .installed.cfg
 *.egg
+*.mypy_cache/
 logs/
 
 # PyInstaller

@@ -79,3 +80,6 @@ target/
 
 # Vim
 *.sw*
+
+# pycharm
+.idea/
1 change: 0 additions & 1 deletion core/dbt/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion core/dbt/adapters/__init__.py

This file was deleted.

2 changes: 2 additions & 0 deletions core/dbt/adapters/base/connections.py

@@ -22,6 +22,8 @@ class Credentials(
     Replaceable,
     metaclass=abc.ABCMeta
 ):
+    database: str
+    schema: str
     _ALIASES: ClassVar[Dict[str, str]] = field(default={}, init=False)
 
     @abc.abstractproperty
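A hypothetical subclass sketch (the abstract members of Credentials are elided): because `database` and `schema` are now declared on the base class, every concrete credentials dataclass inherits both fields, and mypy, newly added to the tox run above, can check code that reads them.

    from dataclasses import dataclass
    from dbt.adapters.base.connections import Credentials

    @dataclass
    class DummyCredentials(Credentials):
        # database: str and schema: str are inherited from Credentials;
        # host and port are made-up adapter-specific fields.
        host: str = 'localhost'
        port: int = 5432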
10 changes: 8 additions & 2 deletions core/dbt/adapters/base/impl.py

@@ -8,6 +8,7 @@
 import dbt.flags
 import dbt.clients.agate_helper
 
+from dbt.contracts.graph.manifest import Manifest
 from dbt.node_types import NodeType
 from dbt.loader import GraphLoader
 from dbt.logger import GLOBAL_LOGGER as logger

@@ -252,8 +253,7 @@ def type(cls):
     @property
     def _internal_manifest(self):
         if self._internal_manifest_lazy is None:
-            manifest = GraphLoader.load_internal(self.config)
-            self._internal_manifest_lazy = manifest
+            self.load_internal_manifest()
         return self._internal_manifest_lazy
 
     def check_internal_manifest(self):

@@ -262,6 +262,12 @@ def check_internal_manifest(self):
         """
         return self._internal_manifest_lazy
 
+    def load_internal_manifest(self) -> Manifest:
+        if self._internal_manifest_lazy is None:
+            manifest = GraphLoader.load_internal(self.config)
+            self._internal_manifest_lazy = manifest
+        return self._internal_manifest_lazy
+
     ###
     # Caching methods
     ###
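The caching pattern these two hunks introduce, reduced to a standalone runnable sketch; object() stands in for GraphLoader.load_internal(self.config):

    class LazyManifestHolder:
        """The first load_internal_manifest() call loads and caches the
        manifest; the _internal_manifest property just delegates to it."""

        def __init__(self):
            self._internal_manifest_lazy = None

        def load_internal_manifest(self):
            if self._internal_manifest_lazy is None:
                self._internal_manifest_lazy = object()  # GraphLoader stand-in
            return self._internal_manifest_lazy

        @property
        def _internal_manifest(self):
            if self._internal_manifest_lazy is None:
                self.load_internal_manifest()
            return self._internal_manifest_lazy

    holder = LazyManifestHolder()
    assert holder.load_internal_manifest() is holder._internal_manifest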
26 changes: 15 additions & 11 deletions core/dbt/clients/jinja.py

@@ -2,6 +2,7 @@
 import linecache
 import os
 import tempfile
+from typing import List, Union, Set, Optional
 
 import jinja2
 import jinja2._compat

@@ -13,7 +14,7 @@
 import dbt.exceptions
 import dbt.utils
 
-from dbt.clients._jinja_blocks import BlockIterator
+from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
 
 from dbt.logger import GLOBAL_LOGGER as logger  # noqa
 

@@ -305,21 +306,24 @@ def undefined_error(msg):
     raise jinja2.exceptions.UndefinedError(msg)
 
 
-def extract_toplevel_blocks(data, allowed_blocks=None, collect_raw_data=True):
+def extract_toplevel_blocks(
+    data: str,
+    allowed_blocks: Optional[Set[str]] = None,
+    collect_raw_data: bool = True,
+) -> List[Union[BlockData, BlockTag]]:
     """Extract the top level blocks with matching block types from a jinja
     file, with some special handling for block nesting.
-    :param str data: The data to extract blocks from.
-    :param Optional[Set[str]] allowed_blocks: The names of the blocks to
-        extract from the file. They may not be nested within if/for blocks.
-        If None, use the default values.
-    :param bool collect_raw_data: If set, raw data between matched blocks will
-        also be part of the results, as `BlockData` objects. They have a
+    :param data: The data to extract blocks from.
+    :param allowed_blocks: The names of the blocks to extract from the file.
+        They may not be nested within if/for blocks. If None, use the default
+        values.
+    :param collect_raw_data: If set, raw data between matched blocks will also
+        be part of the results, as `BlockData` objects. They have a
         `block_type_name` field of `'__dbt_data'` and will never have a
         `block_name`.
-    :return List[Union[BlockData, BlockTag]]: A list of `BlockTag`s matching
-        the allowed block types and (if `collect_raw_data` is `True`)
-        `BlockData` objects.
+    :return: A list of `BlockTag`s matching the allowed block types and (if
+        `collect_raw_data` is `True`) `BlockData` objects.
     """
     return BlockIterator(data).lex_for_blocks(
         allowed_blocks=allowed_blocks,
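A hedged usage sketch of the newly annotated API, assuming dbt at this commit is importable; the jinja sample string is made up:

    from dbt.clients.jinja import extract_toplevel_blocks

    raw = "{% snapshot my_snap %} select 1 {% endsnapshot %} select 2"
    blocks = extract_toplevel_blocks(raw, allowed_blocks={'snapshot'})
    for block in blocks:
        # BlockTag entries name their block; BlockData entries (raw text
        # between tags) report block_type_name '__dbt_data' and carry no
        # block_name, per the docstring above.
        print(block.block_type_name, getattr(block, 'block_name', None))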
6 changes: 3 additions & 3 deletions core/dbt/clients/system.py

@@ -24,13 +24,13 @@ def find_matching(root_path,
     absolute root path (`relative_paths_to_search`), and a `file_pattern`
     like '*.sql', returns information about the files. For example:
-    > find_matching('/root/path', 'models', '*.sql')
+    > find_matching('/root/path', ['models'], '*.sql')
     [ { 'absolute_path': '/root/path/models/model_one.sql',
-        'relative_path': 'models/model_one.sql',
+        'relative_path': 'model_one.sql',
         'searched_path': 'models' },
       { 'absolute_path': '/root/path/models/subdirectory/model_two.sql',
-        'relative_path': 'models/subdirectory/model_two.sql',
+        'relative_path': 'subdirectory/model_two.sql',
         'searched_path': 'models' } ]
     """
     matching = []
5 changes: 3 additions & 2 deletions core/dbt/compilation.py

@@ -94,7 +94,7 @@ def recursively_prepend_ctes(model, manifest):
 
     model.prepend_ctes(prepended_ctes)
 
-    manifest.nodes[model.unique_id] = model
+    manifest.update_node(model)
 
     return (model, prepended_ctes, manifest)
 

@@ -167,7 +167,8 @@ def compile_node(self, node, manifest, extra_context=None):
     def write_graph_file(self, linker, manifest):
         filename = graph_file_name
         graph_path = os.path.join(self.config.target_path, filename)
-        linker.write_graph(graph_path, manifest)
+        if dbt.flags.WRITE_JSON:
+            linker.write_graph(graph_path, manifest)
 
     def link_node(self, linker, node, manifest):
         linker.add_node(node.unique_id)
2 changes: 1 addition & 1 deletion core/dbt/config/project.py

@@ -18,7 +18,7 @@
 from dbt.ui import printer
 from dbt.utils import deep_map
 from dbt.utils import parse_cli_vars
-from dbt.parser.source_config import SourceConfig
+from dbt.source_config import SourceConfig
 
 from dbt.contracts.project import Project as ProjectContract
 from dbt.contracts.project import PackageConfig
2 changes: 1 addition & 1 deletion core/dbt/context/runtime.py

@@ -3,7 +3,7 @@
 import dbt.clients.jinja
 import dbt.context.common
 import dbt.flags
-from dbt.parser import ParserUtils
+from dbt.parser.util import ParserUtils
 
 from dbt.logger import GLOBAL_LOGGER as logger  # noqa
 
17 changes: 17 additions & 0 deletions core/dbt/contracts/graph/compiled.py

@@ -11,6 +11,7 @@
     ParsedSourceDefinition,
     ParsedTestNode,
     TestConfig,
+    PARSED_TYPES,
 )
 from dbt.node_types import (
     NodeType,

@@ -96,6 +97,11 @@ class CompiledRPCNode(CompiledNode):
 class CompiledSeedNode(CompiledNode):
     resource_type: SeedType
 
+    @property
+    def empty(self):
+        """Seeds are never empty"""
+        return False
+
 
 @dataclass
 class CompiledSnapshotNode(CompiledNode):

@@ -187,6 +193,17 @@ def compiled_type_for(parsed: ParsedNode):
         return type(parsed)
 
 
+def parsed_instance_for(compiled: CompiledNode) -> ParsedNode:
+    cls = PARSED_TYPES.get(compiled.resource_type)
+    if cls is None:
+        # how???
+        raise ValueError('invalid resource_type: {}'
+                         .format(compiled.resource_type))
+
+    # validate=False to allow extra keys from compiling
+    return cls.from_dict(compiled.to_dict(), validate=False)
+
+
 # We allow either parsed or compiled nodes, or parsed sources, as some
 # 'compile()' calls in the runner actually just return the original parsed
 # node they were given.
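A short round-trip sketch for the new parsed_instance_for helper; `compiled_node` here is a hypothetical compiled node instance. PARSED_TYPES maps each resource_type to its Parsed* dataclass, so a compiled node can be converted back by serializing it and re-reading with validate=False, which tolerates the extra compiled-only keys:

    from dbt.contracts.graph.compiled import parsed_instance_for, PARSED_TYPES

    parsed = parsed_instance_for(compiled_node)
    assert type(parsed) is PARSED_TYPES[compiled_node.resource_type]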