Mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-19 19:41:28 +00:00
Compare commits (37 commits): enable-pos...0.19.lates
| SHA1 |
|---|
| 745ae3e64a |
| 4ee41c9347 |
| c662e46d4d |
| 0fd73249d2 |
| 96d89d3649 |
| c409492134 |
| c432601e6d |
| 9a5a0516e8 |
| a14b71092d |
| baeab71c45 |
| cc64906e52 |
| 79ea1c2fc2 |
| db2ccf2719 |
| 8ade68abc3 |
| 2925f98a52 |
| 4abcd18d26 |
| 7bf2fd649a |
| 903e6de0ca |
| 1edb8a60c8 |
| 2e7d700e59 |
| 908b275570 |
| 116fe428ff |
| ec907b46e8 |
| b55d461abe |
| 841b2115a4 |
| 114fc0e6cf |
| 21c6294c09 |
| cc9472d865 |
| 974cb516e6 |
| eabbbb2c76 |
| 4bb7a84182 |
| 5c66893b01 |
| 27a08cdbf9 |
| e47d642d3f |
| 597448d9ef |
| 47b345a0c2 |
| de864a7eed |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.19.1b2
+current_version = 0.19.2
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
CHANGELOG.md (42 changed lines)
@@ -1,11 +1,47 @@
-## dbt 0.19.1 (Release TBD)
+## dbt 0.19.2 (June 28, 2021)
+
+### Fixes
+- Fix infinite recursion when parsing schema tests due to loops in macro calls ([#3444](https://github.com/fishtown-analytics/dbt/issues/3344), [#3454](https://github.com/fishtown-analytics/dbt/pull/3454))
+
+## dbt 0.19.2rc2 (June 03, 2021)
+
+### Breaking changes
+- Fix adapter.dispatch macro resolution when statically extracting macros. Introduce new project-level `dispatch` config. The `packages` argument to `dispatch` no longer supports macro calls; there is backwards compatibility for existing packages. The argument will no longer be supported in a future release, instead provide the `macro_namespace` argument. ([#3362](https://github.com/fishtown-analytics/dbt/issues/3362), [#3363](https://github.com/fishtown-analytics/dbt/pull/3363), [#3383](https://github.com/fishtown-analytics/dbt/pull/3383), [#3403](https://github.com/fishtown-analytics/dbt/pull/3403))
+
+### Fixes
+- Fix references to macros with package names when rendering schema tests ([#3324](https://github.com/fishtown-analytics/dbt/issues/3324), [#3345](https://github.com/fishtown-analytics/dbt/pull/3345))
+
+## dbt 0.19.2rc1 (April 28, 2021)
+
+### Fixes
+- Ensure that schema test macros are properly processed ([#3229](https://github.com/fishtown-analytics/dbt/issues/3229), [#3272](https://github.com/fishtown-analytics/dbt/pull/3272))
+- Fix regression for default project/database for BigQuery connections ([#3218](https://github.com/fishtown-analytics/dbt/issues/3218), [#3305](https://github.com/fishtown-analytics/dbt/pull/3305))
+
+## dbt 0.19.1 (March 31, 2021)
+
+## dbt 0.19.1rc2 (March 25, 2021)
+
+### Fixes
+- Pass service-account scopes to gcloud-based oauth ([#3040](https://github.com/fishtown-analytics/dbt/issues/3040), [#3041](https://github.com/fishtown-analytics/dbt/pull/3041))
+
+Contributors:
+- [@yu-iskw](https://github.com/yu-iskw) ([#3041](https://github.com/fishtown-analytics/dbt/pull/3041))
+
+## dbt 0.19.1rc1 (March 15, 2021)
+
+### Under the hood
+- Update code to use Mashumaro 2.0 ([#3138](https://github.com/fishtown-analytics/dbt/pull/3138))
+- Pin `agate<1.6.2` to avoid installation errors relating to its new dependency `PyICU` ([#3160](https://github.com/fishtown-analytics/dbt/issues/3160), [#3161](https://github.com/fishtown-analytics/dbt/pull/3161))
+- Add an event to track resource counts ([#3050](https://github.com/fishtown-analytics/dbt/issues/3050), [#3157](https://github.com/fishtown-analytics/dbt/pull/3157))
+
+### Fixes
+
+- Fix compiled sql for ephemeral models ([#3139](https://github.com/fishtown-analytics/dbt/pull/3139), [#3056](https://github.com/fishtown-analytics/dbt/pull/3056))
+
 ## dbt 0.19.1b2 (February 15, 2021)


 ## dbt 0.19.1b1 (February 12, 2021)


 ### Fixes

 - On BigQuery, fix regressions for `insert_overwrite` incremental strategy with `int64` and `timestamp` partition columns ([#3063](https://github.com/fishtown-analytics/dbt/issues/3063), [#3095](https://github.com/fishtown-analytics/dbt/issues/3095), [#3098](https://github.com/fishtown-analytics/dbt/issues/3098))
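The `adapter.dispatch` breaking change above is easiest to see side by side. The sketch below is illustrative only: the `concat` macro and its argument are invented, but the "macro call inside `packages`" pattern being deprecated and the new `macro_namespace` argument are the ones described in the 0.19.2rc2 entry.

```python
# Hypothetical old-style vs. new-style adapter.dispatch calls, written out as
# Jinja template strings. Macro and argument names are made up for illustration.
old_style = (
    "{{ adapter.dispatch('concat', "
    "packages=dbt_utils._get_utils_namespaces())(field_list) }}"
)  # deprecated: a macro call inside the 'packages' argument

new_style = (
    "{{ adapter.dispatch('concat', macro_namespace='dbt_utils')(field_list) }}"
)  # going forward: pass a namespace string and configure the search order
   # with the new project-level 'dispatch' config
```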
@@ -45,7 +45,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return False
-        return self.to_dict() == other.to_dict()
+        return self.to_dict(omit_none=True) == other.to_dict(omit_none=True)

     @classmethod
     def get_default_quote_policy(cls) -> Policy:
@@ -185,10 +185,10 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from_source(
         cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any
     ) -> Self:
-        source_quoting = source.quoting.to_dict()
+        source_quoting = source.quoting.to_dict(omit_none=True)
         source_quoting.pop('column', None)
         quote_policy = deep_merge(
-            cls.get_default_quote_policy().to_dict(),
+            cls.get_default_quote_policy().to_dict(omit_none=True),
             source_quoting,
             kwargs.get('quote_policy', {}),
         )
core/dbt/clients/jinja_static.py (new file, 222 lines)
@@ -0,0 +1,222 @@
import jinja2
from dbt.clients.jinja import get_environment
from dbt.exceptions import raise_compiler_error


def statically_extract_macro_calls(string, ctx, db_wrapper=None):
    # set 'capture_macros' to capture undefined
    env = get_environment(None, capture_macros=True)
    parsed = env.parse(string)

    standard_calls = ['source', 'ref', 'config']
    possible_macro_calls = []
    for func_call in parsed.find_all(jinja2.nodes.Call):
        func_name = None
        if hasattr(func_call, 'node') and hasattr(func_call.node, 'name'):
            func_name = func_call.node.name
        else:
            # func_call for dbt_utils.current_timestamp macro
            # Call(
            #     node=Getattr(
            #         node=Name(
            #             name='dbt_utils',
            #             ctx='load'
            #         ),
            #         attr='current_timestamp',
            #         ctx='load'
            #     ),
            #     args=[],
            #     kwargs=[],
            #     dyn_args=None,
            #     dyn_kwargs=None
            # )
            if (hasattr(func_call, 'node') and
                    hasattr(func_call.node, 'node') and
                    type(func_call.node.node).__name__ == 'Name' and
                    hasattr(func_call.node, 'attr')):
                package_name = func_call.node.node.name
                macro_name = func_call.node.attr
                if package_name == 'adapter':
                    if macro_name == 'dispatch':
                        ad_macro_calls = statically_parse_adapter_dispatch(
                            func_call, ctx, db_wrapper)
                        possible_macro_calls.extend(ad_macro_calls)
                    else:
                        # This skips calls such as adapter.parse_index
                        continue
                else:
                    func_name = f'{package_name}.{macro_name}'
            else:
                continue
        if not func_name:
            continue
        if func_name in standard_calls:
            continue
        elif ctx.get(func_name):
            continue
        else:
            if func_name not in possible_macro_calls:
                possible_macro_calls.append(func_name)

    return possible_macro_calls


# Call(
#     node=Getattr(
#         node=Name(
#             name='adapter',
#             ctx='load'
#         ),
#         attr='dispatch',
#         ctx='load'
#     ),
#     args=[
#         Const(value='test_pkg_and_dispatch')
#     ],
#     kwargs=[
#         Keyword(
#             key='packages',
#             value=Call(node=Getattr(node=Name(name='local_utils', ctx='load'),
#                 attr='_get_utils_namespaces', ctx='load'), args=[], kwargs=[],
#                 dyn_args=None, dyn_kwargs=None)
#         )
#     ],
#     dyn_args=None,
#     dyn_kwargs=None
# )
def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
    possible_macro_calls = []
    # This captures an adapter.dispatch('<macro_name>') call.

    func_name = None
    # macro_name positional argument
    if len(func_call.args) > 0:
        func_name = func_call.args[0].value
    if func_name:
        possible_macro_calls.append(func_name)

    # packages positional argument
    packages = []
    macro_namespace = None
    packages_arg = None
    packages_arg_type = None

    if len(func_call.args) > 1:
        packages_arg = func_call.args[1]
        # This can be a List or a Call
        packages_arg_type = type(func_call.args[1]).__name__

    # keyword arguments
    if func_call.kwargs:
        for kwarg in func_call.kwargs:
            if kwarg.key == 'packages':
                # The packages keyword will be deprecated and
                # eventually removed
                packages_arg = kwarg.value
                # This can be a List or a Call
                packages_arg_type = type(kwarg.value).__name__
            elif kwarg.key == 'macro_name':
                # This will remain to enable static resolution
                if type(kwarg.value).__name__ == 'Const':
                    func_name = kwarg.value.value
                    possible_macro_calls.append(func_name)
                else:
                    raise_compiler_error(f"The macro_name parameter ({kwarg.value.value}) "
                                         "to adapter.dispatch was not a string")
            elif kwarg.key == 'macro_namespace':
                # This will remain to enable static resolution
                kwarg_type = type(kwarg.value).__name__
                if kwarg_type == 'Const':
                    macro_namespace = kwarg.value.value
                else:
                    raise_compiler_error("The macro_namespace parameter to adapter.dispatch "
                                         f"is a {kwarg_type}, not a string")

    # positional arguments
    if packages_arg:
        if packages_arg_type == 'List':
            # This will remain to enable static resolution
            packages = []
            for item in packages_arg.items:
                packages.append(item.value)
        elif packages_arg_type == 'Const':
            # This will remain to enable static resolution
            macro_namespace = packages_arg.value
        elif packages_arg_type == 'Call':
            # This is deprecated and should be removed eventually.
            # It is here to support (hackily) common ways of providing
            # a packages list to adapter.dispatch
            if (hasattr(packages_arg, 'node') and
                    hasattr(packages_arg.node, 'node') and
                    hasattr(packages_arg.node.node, 'name') and
                    hasattr(packages_arg.node, 'attr')):
                package_name = packages_arg.node.node.name
                macro_name = packages_arg.node.attr
                if (macro_name.startswith('_get') and 'namespaces' in macro_name):
                    # noqa: https://github.com/fishtown-analytics/dbt-utils/blob/9e9407b/macros/cross_db_utils/_get_utils_namespaces.sql
                    var_name = f'{package_name}_dispatch_list'
                    # hard code compatibility for fivetran_utils, just a teensy bit different
                    # noqa: https://github.com/fivetran/dbt_fivetran_utils/blob/0978ba2/macros/_get_utils_namespaces.sql
                    if package_name == 'fivetran_utils':
                        default_packages = ['dbt_utils', 'fivetran_utils']
                    else:
                        default_packages = [package_name]

                    namespace_names = get_dispatch_list(ctx, var_name, default_packages)
                    if namespace_names:
                        packages.extend(namespace_names)
                else:
                    msg = (
                        f"As of v0.19.2, custom macros, such as '{macro_name}', are no longer "
                        "supported in the 'packages' argument of 'adapter.dispatch()'.\n"
                        f"See https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch "
                        "for details."
                    ).strip()
                    raise_compiler_error(msg)
        elif packages_arg_type == 'Add':
            # This logic is for when there is a variable and an addition of a list,
            # like: packages = (var('local_utils_dispatch_list', []) + ['local_utils2'])
            # This is deprecated and should be removed eventually.
            namespace_var = None
            default_namespaces = []
            # This might be a single call or it might be the 'left' piece in an addition
            for var_call in packages_arg.find_all(jinja2.nodes.Call):
                if (hasattr(var_call, 'node') and
                        var_call.node.name == 'var' and
                        hasattr(var_call, 'args')):
                    namespace_var = var_call.args[0].value
            if hasattr(packages_arg, 'right'):  # we have a default list of namespaces
                for item in packages_arg.right.items:
                    default_namespaces.append(item.value)
            if namespace_var:
                namespace_names = get_dispatch_list(ctx, namespace_var, default_namespaces)
                if namespace_names:
                    packages.extend(namespace_names)

    if db_wrapper:
        if not packages:
            if macro_namespace:
                packages = macro_namespace
            else:
                packages = None  # empty list behaves differently than None...
        macro = db_wrapper.dispatch(func_name, packages).macro
        func_name = f'{macro.package_name}.{macro.name}'
        possible_macro_calls.append(func_name)
    else:  # this is only for test/unit/test_macro_calls.py
        if macro_namespace:
            packages = [macro_namespace]
        for package_name in packages:
            possible_macro_calls.append(f'{package_name}.{func_name}')

    return possible_macro_calls


def get_dispatch_list(ctx, var_name, default_packages):
    namespace_list = None
    try:
        # match the logic currently used in package _get_namespaces() macro
        namespace_list = ctx['var'](var_name) + default_packages
    except Exception:
        pass
    namespace_list = namespace_list if namespace_list else default_packages
    return namespace_list
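A minimal usage sketch of the extractor above; the template string and the empty `ctx` are made up, and with no `db_wrapper` any dispatch calls would fall back to the unit-test path.

```python
# Minimal sketch, assuming a dbt installation where the new module is importable.
from dbt.clients.jinja_static import statically_extract_macro_calls

template = (
    "select {{ dbt_utils.current_timestamp() }} as ts "
    "from {{ ref('my_model') }}"
)
# 'ref' is in standard_calls and is skipped; the package-qualified call is kept.
calls = statically_extract_macro_calls(template, ctx={})
print(calls)  # expected: ['dbt_utils.current_timestamp']
```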
@@ -30,6 +30,7 @@ from dbt.graph import Graph
 from dbt.logger import GLOBAL_LOGGER as logger
 from dbt.node_types import NodeType
 from dbt.utils import pluralize
+import dbt.tracking

 graph_file_name = 'graph.gpickle'

@@ -58,6 +59,11 @@ def print_compile_stats(stats):
     results = {k: 0 for k in names.keys()}
     results.update(stats)

+    # create tracking event for resource_counts
+    if dbt.tracking.active_user is not None:
+        resource_counts = {k.pluralize(): v for k, v in results.items()}
+        dbt.tracking.track_resource_counts(resource_counts)
+
     stat_line = ", ".join([
         pluralize(ct, names.get(t)) for t, ct in results.items()
         if t in names
@@ -138,7 +144,7 @@ class Linker:
         """
         out_graph = self.graph.copy()
         for node_id in self.graph.nodes():
-            data = manifest.expect(node_id).to_dict()
+            data = manifest.expect(node_id).to_dict(omit_none=True)
             out_graph.add_node(node_id, **data)
         nx.write_gpickle(out_graph, outfile)

@@ -248,19 +254,19 @@ class Compiler:
     def _get_dbt_test_name(self) -> str:
         return 'dbt__CTE__INTERNAL_test'

-    # This method is called by the 'compile_node' method. Starting
-    # from the node that it is passed in, it will recursively call
-    # itself using the 'extra_ctes'. The 'ephemeral' models do
-    # not produce SQL that is executed directly, instead they
-    # are rolled up into the models that refer to them by
-    # inserting CTEs into the SQL.
     def _recursively_prepend_ctes(
         self,
         model: NonSourceCompiledNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]],
     ) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
+        """This method is called by the 'compile_node' method. Starting
+        from the node that it is passed in, it will recursively call
+        itself using the 'extra_ctes'. The 'ephemeral' models do
+        not produce SQL that is executed directly, instead they
+        are rolled up into the models that refer to them by
+        inserting CTEs into the SQL.
+        """
         if model.compiled_sql is None:
             raise RuntimeException(
                 'Cannot inject ctes into an unparsed node', model
@@ -324,22 +330,22 @@ class Compiler:
             _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

             new_cte_name = self.add_ephemeral_prefix(cte_model.name)
-            sql = f' {new_cte_name} as (\n{cte_model.compiled_sql}\n)'
+            rendered_sql = (
+                cte_model._pre_injected_sql or cte_model.compiled_sql
+            )
+            sql = f' {new_cte_name} as (\n{rendered_sql}\n)'

             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        # We don't save injected_sql into compiled sql for ephemeral models
-        # because it will cause problems with processing of subsequent models.
-        # Ephemeral models do not produce executable SQL of their own.
-        if not model.is_ephemeral_model:
-            injected_sql = self._inject_ctes_into_sql(
-                model.compiled_sql,
-                prepended_ctes,
-            )
-            model.compiled_sql = injected_sql
+        injected_sql = self._inject_ctes_into_sql(
+            model.compiled_sql,
+            prepended_ctes,
+        )
+        model._pre_injected_sql = model.compiled_sql
+        model.compiled_sql = injected_sql

         model.extra_ctes_injected = True
         model.extra_ctes = prepended_ctes
-        model.validate(model.to_dict())
+        model.validate(model.to_dict(omit_none=True))

         manifest.update_node(model)
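For orientation, a rough sketch of the CTE prepending that the hunk above changes. The names and SQL are invented, and the real `_inject_ctes_into_sql` splices the CTE into any existing `with` clause rather than concatenating strings.

```python
# Hypothetical ephemeral model rolled up into the model that refs it.
ephemeral_cte_name = '__dbt__CTE__my_ephemeral'    # assumed prefix from add_ephemeral_prefix
ephemeral_sql = 'select 1 as id'                   # the ephemeral model's compiled SQL
model_sql = f'select * from {ephemeral_cte_name}'  # the referring model's compiled SQL

# After injection, the referring model carries the ephemeral body as a CTE.
injected_sql = f'with {ephemeral_cte_name} as (\n{ephemeral_sql}\n)\n{model_sql}'
# On the 0.19.x side of the diff, the pre-injection SQL is kept in
# model._pre_injected_sql so ephemeral CTE bodies are rendered from
# un-injected SQL on later passes.
print(injected_sql)
```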
@@ -388,7 +394,7 @@ class Compiler:

         logger.debug("Compiling {}".format(node.unique_id))

-        data = node.to_dict()
+        data = node.to_dict(omit_none=True)
         data.update({
             'compiled': False,
             'compiled_sql': None,
@@ -487,11 +493,6 @@ class Compiler:
             )
         return node

-    # This is the main entry point into this code. It's called by
-    # CompileRunner.compile, GenericRPCRunner.compile, and
-    # RunTask.get_hook_sql. It calls '_compile_node' to convert
-    # the node into a compiled node, and then calls the
-    # recursive method to "prepend" the ctes.
     def compile_node(
         self,
         node: ManifestNode,
@@ -499,6 +500,12 @@ class Compiler:
         extra_context: Optional[Dict[str, Any]] = None,
         write: bool = True,
     ) -> NonSourceCompiledNode:
+        """This is the main entry point into this code. It's called by
+        CompileRunner.compile, GenericRPCRunner.compile, and
+        RunTask.get_hook_sql. It calls '_compile_node' to convert
+        the node into a compiled node, and then calls the
+        recursive method to "prepend" the ctes.
+        """
         node = self._compile_node(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(
@@ -111,8 +111,8 @@ class Profile(HasCredentials):
             'credentials': self.credentials,
         }
         if serialize_credentials:
-            result['config'] = self.config.to_dict()
-            result['credentials'] = self.credentials.to_dict()
+            result['config'] = self.config.to_dict(omit_none=True)
+            result['credentials'] = self.credentials.to_dict(omit_none=True)
         return result

     def to_target_dict(self) -> Dict[str, Any]:
@@ -125,7 +125,7 @@ class Profile(HasCredentials):
             'name': self.target_name,
             'target_name': self.target_name,
             'profile_name': self.profile_name,
-            'config': self.config.to_dict(),
+            'config': self.config.to_dict(omit_none=True),
         })
         return target

@@ -138,7 +138,7 @@ class Profile(HasCredentials):
     def validate(self):
         try:
             if self.credentials:
-                dct = self.credentials.to_dict()
+                dct = self.credentials.to_dict(omit_none=True)
                 self.credentials.validate(dct)
             dct = self.to_profile_info(serialize_credentials=True)
             ProfileConfig.validate(dct)
@@ -347,14 +347,16 @@ class PartialProject(RenderComponents):
         # break many things
         quoting: Dict[str, Any] = {}
         if cfg.quoting is not None:
-            quoting = cfg.quoting.to_dict()
+            quoting = cfg.quoting.to_dict(omit_none=True)

+        dispatch: List[Dict[str, Any]]
         models: Dict[str, Any]
         seeds: Dict[str, Any]
         snapshots: Dict[str, Any]
         sources: Dict[str, Any]
         vars_value: VarProvider

+        dispatch = cfg.dispatch
         models = cfg.models
         seeds = cfg.seeds
         snapshots = cfg.snapshots
@@ -400,6 +402,7 @@ class PartialProject(RenderComponents):
             models=models,
             on_run_start=on_run_start,
             on_run_end=on_run_end,
+            dispatch=dispatch,
             seeds=seeds,
             snapshots=snapshots,
             dbt_version=dbt_version,
@@ -510,6 +513,7 @@ class Project:
     models: Dict[str, Any]
     on_run_start: List[str]
     on_run_end: List[str]
+    dispatch: List[Dict[str, Any]]
     seeds: Dict[str, Any]
     snapshots: Dict[str, Any]
     sources: Dict[str, Any]
@@ -568,6 +572,7 @@ class Project:
             'models': self.models,
             'on-run-start': self.on_run_start,
             'on-run-end': self.on_run_end,
+            'dispatch': self.dispatch,
             'seeds': self.seeds,
             'snapshots': self.snapshots,
             'sources': self.sources,
@@ -578,10 +583,11 @@ class Project:
             'config-version': self.config_version,
         })
         if self.query_comment:
-            result['query-comment'] = self.query_comment.to_dict()
+            result['query-comment'] = \
+                self.query_comment.to_dict(omit_none=True)

         if with_packages:
-            result.update(self.packages.to_dict())
+            result.update(self.packages.to_dict(omit_none=True))

         return result

@@ -641,3 +647,9 @@ class Project:
                 f'{list(self.selectors)}'
             )
         return self.selectors[name]
+
+    def get_macro_search_order(self, macro_namespace: str):
+        for dispatch_entry in self.dispatch:
+            if dispatch_entry['macro_namespace'] == macro_namespace:
+                return dispatch_entry['search_order']
+        return None
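The new `dispatch` project config that `get_macro_search_order` reads is a list of entries with `macro_namespace` and `search_order` keys. A small sketch with hypothetical project names:

```python
# Hypothetical parsed value of the project-level 'dispatch' config.
dispatch = [
    {'macro_namespace': 'dbt_utils', 'search_order': ['my_project', 'dbt_utils']},
]

def get_macro_search_order(macro_namespace):
    # Mirrors Project.get_macro_search_order from the hunk above.
    for dispatch_entry in dispatch:
        if dispatch_entry['macro_namespace'] == macro_namespace:
            return dispatch_entry['search_order']
    return None

print(get_macro_search_order('dbt_utils'))  # ['my_project', 'dbt_utils']
print(get_macro_search_order('other_pkg'))  # None
```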
@@ -78,7 +78,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         get_relation_class_by_name(profile.credentials.type)
         .get_default_quote_policy()
         .replace_dict(_project_quoting_dict(project, profile))
-    ).to_dict()
+    ).to_dict(omit_none=True)

     cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, 'vars', '{}'))

@@ -102,6 +102,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             models=project.models,
             on_run_start=project.on_run_start,
             on_run_end=project.on_run_end,
+            dispatch=project.dispatch,
             seeds=project.seeds,
             snapshots=project.snapshots,
             dbt_version=project.dbt_version,
@@ -391,7 +392,7 @@ class UnsetConfig(UserConfig):
                 f"'UnsetConfig' object has no attribute {name}"
             )

-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         return {}


@@ -480,6 +481,7 @@ class UnsetProfileConfig(RuntimeConfig):
             models=project.models,
             on_run_start=project.on_run_start,
             on_run_end=project.on_run_end,
+            dispatch=project.dispatch,
             seeds=project.seeds,
             snapshots=project.snapshots,
             dbt_version=project.dbt_version,
@@ -538,4 +538,5 @@ class BaseContext(metaclass=ContextMeta):

 def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
     ctx = BaseContext(cli_vars)
+    # This is not a Mashumaro to_dict call
     return ctx.to_dict()
@@ -75,8 +75,26 @@ class SchemaYamlContext(ConfiguredContext):
         )


+class MacroResolvingContext(ConfiguredContext):
+    def __init__(self, config):
+        super().__init__(config)
+
+    @contextproperty
+    def var(self) -> ConfiguredVar:
+        return ConfiguredVar(
+            self._ctx, self.config, self.config.project_name
+        )
+
+
 def generate_schema_yml(
     config: AdapterRequiredConfig, project_name: str
 ) -> Dict[str, Any]:
     ctx = SchemaYamlContext(config, project_name)
     return ctx.to_dict()
+
+
+def generate_macro_context(
+    config: AdapterRequiredConfig,
+) -> Dict[str, Any]:
+    ctx = MacroResolvingContext(config)
+    return ctx.to_dict()
@@ -196,7 +196,7 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):
             base=base,
         )
         finalized = config.finalize_and_validate()
-        return finalized.to_dict()
+        return finalized.to_dict(omit_none=True)


 class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):
@@ -77,4 +77,5 @@ def generate_runtime_docs(
     current_project: str,
 ) -> Dict[str, Any]:
     ctx = DocsRuntimeContext(config, target, manifest, current_project)
+    # This is not a Mashumaro to_dict call
     return ctx.to_dict()
@@ -14,8 +14,12 @@ MacroNamespace = Dict[str, ParsedMacro]
 # so that higher precedence macros are found first.
 # This functionality is also provided by the MacroNamespace,
 # but the intention is to eventually replace that class.
-# This enables us to get the macor unique_id without
+# This enables us to get the macro unique_id without
 # processing every macro in the project.
+# Note: the root project macros override everything in the
+# dbt internal projects. External projects (dependencies) will
+# use their own macros first, then pull from the root project
+# followed by dbt internal projects.
 class MacroResolver:
     def __init__(
         self,
@@ -48,18 +52,29 @@ class MacroResolver:
             self.internal_packages_namespace.update(
                 self.internal_packages[pkg])

+    # search order:
+    # local_namespace (package of particular node), not including
+    #   the internal packages or the root package
+    #   This means that within an extra package, it uses its own macros
+    # root package namespace
+    # non-internal packages (that aren't local or root)
+    # dbt internal packages
     def _build_macros_by_name(self):
         macros_by_name = {}
-        # search root package macros
-        for macro in self.root_package_macros.values():
-            macros_by_name[macro.name] = macro
-        # search miscellaneous non-internal packages
+
+        # all internal packages (already in the right order)
+        for macro in self.internal_packages_namespace.values():
+            macros_by_name[macro.name] = macro
+
+        # non-internal packages
         for fnamespace in self.packages.values():
             for macro in fnamespace.values():
                 macros_by_name[macro.name] = macro
-        # search all internal packages
-        for macro in self.internal_packages_namespace.values():
+
+        # root package macros
+        for macro in self.root_package_macros.values():
             macros_by_name[macro.name] = macro
+
         self.macros_by_name = macros_by_name

     def _add_macro_to(
@@ -97,18 +112,26 @@ class MacroResolver:
         for macro in self.macros.values():
             self.add_macro(macro)

-    def get_macro_id(self, local_package, macro_name):
+    def get_macro(self, local_package, macro_name):
         local_package_macros = {}
         if (local_package not in self.internal_package_names and
                 local_package in self.packages):
             local_package_macros = self.packages[local_package]
         # First: search the local packages for this macro
         if macro_name in local_package_macros:
-            return local_package_macros[macro_name].unique_id
+            return local_package_macros[macro_name]
+        # Now look up in the standard search order
         if macro_name in self.macros_by_name:
-            return self.macros_by_name[macro_name].unique_id
+            return self.macros_by_name[macro_name]
         return None

+    def get_macro_id(self, local_package, macro_name):
+        macro = self.get_macro(local_package, macro_name)
+        if macro is None:
+            return None
+        else:
+            return macro.unique_id
+

 # Currently this is just used by test processing in the schema
 # parser (in connection with the MacroResolver). Future work
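The reordered loops in `_build_macros_by_name` rely on later dict updates overwriting earlier ones, so the root package ends up with the highest precedence. A tiny sketch with invented macro names:

```python
# Later updates win: internal packages first, non-internal packages next,
# root package last (highest precedence). All names are hypothetical.
macros_by_name = {}
macros_by_name.update({'current_timestamp': 'dbt.current_timestamp'})        # internal (dbt)
macros_by_name.update({'current_timestamp': 'dbt_utils.current_timestamp'})  # non-internal package
macros_by_name.update({'current_timestamp': 'my_project.current_timestamp'}) # root package
assert macros_by_name['current_timestamp'] == 'my_project.current_timestamp'
```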
@@ -122,16 +145,37 @@ class TestMacroNamespace:
     ):
         self.macro_resolver = macro_resolver
         self.ctx = ctx
-        self.node = node
+        self.node = node  # can be none
         self.thread_ctx = thread_ctx
-        local_namespace = {}
+        self.local_namespace = {}
+        self.project_namespace = {}
         if depends_on_macros:
-            for macro_unique_id in depends_on_macros:
-                macro = self.manifest.macros[macro_unique_id]
-                local_namespace[macro.name] = MacroGenerator(
-                    macro, self.ctx, self.node, self.thread_ctx,
-                )
-        self.local_namespace = local_namespace
+            dep_macros = []
+            self.recursively_get_depends_on_macros(depends_on_macros, dep_macros)
+            for macro_unique_id in dep_macros:
+                if macro_unique_id in self.macro_resolver.macros:
+                    # Split up the macro unique_id to get the project_name
+                    (_, project_name, macro_name) = macro_unique_id.split('.')
+                    # Save the plain macro_name in the local_namespace
+                    macro = self.macro_resolver.macros[macro_unique_id]
+                    macro_gen = MacroGenerator(
+                        macro, self.ctx, self.node, self.thread_ctx,
+                    )
+                    self.local_namespace[macro_name] = macro_gen
+                    # We also need the two part macro name
+                    if project_name not in self.project_namespace:
+                        self.project_namespace[project_name] = {}
+                    self.project_namespace[project_name][macro_name] = macro_gen
+
+    def recursively_get_depends_on_macros(self, depends_on_macros, dep_macros):
+        for macro_unique_id in depends_on_macros:
+            if macro_unique_id in dep_macros:
+                continue
+            dep_macros.append(macro_unique_id)
+            if macro_unique_id in self.macro_resolver.macros:
+                macro = self.macro_resolver.macros[macro_unique_id]
+                if macro.depends_on.macros:
+                    self.recursively_get_depends_on_macros(macro.depends_on.macros, dep_macros)

     def get_from_package(
         self, package_name: Optional[str], name: str
@@ -141,12 +185,14 @@ class TestMacroNamespace:
             macro = self.macro_resolver.macros_by_name.get(name)
         elif package_name == GLOBAL_PROJECT_NAME:
             macro = self.macro_resolver.internal_packages_namespace.get(name)
-        elif package_name in self.resolver.packages:
+        elif package_name in self.macro_resolver.packages:
             macro = self.macro_resolver.packages[package_name].get(name)
         else:
             raise_compiler_error(
                 f"Could not find package '{package_name}'"
             )
+        if not macro:
+            return None
         macro_func = MacroGenerator(
             macro, self.ctx, self.node, self.thread_ctx
         )
@@ -19,13 +19,17 @@ FullNamespace = Dict[str, NamespaceMember]
 # and provide the ability to flatten them into the ManifestContexts
 # that are created for jinja, so that macro calls can be resolved.
 # Creates special iterators and _keys methods to flatten the lists.
+# When this class is created it has a static 'local_namespace' which
+# depends on the package of the node, so it only works for one
+# particular local package at a time for "flattening" into a context.
+# 'get_by_package' should work for any macro.
 class MacroNamespace(Mapping):
     def __init__(
         self,
-        global_namespace: FlatNamespace,
-        local_namespace: FlatNamespace,
-        global_project_namespace: FlatNamespace,
-        packages: Dict[str, FlatNamespace],
+        global_namespace: FlatNamespace,  # root package macros
+        local_namespace: FlatNamespace,  # packages for *this* node
+        global_project_namespace: FlatNamespace,  # internal packages
+        packages: Dict[str, FlatNamespace],  # non-internal packages
     ):
         self.global_namespace: FlatNamespace = global_namespace
         self.local_namespace: FlatNamespace = local_namespace
@@ -33,13 +37,13 @@ class MacroNamespace(Mapping):
         self.global_project_namespace: FlatNamespace = global_project_namespace

     def _search_order(self) -> Iterable[Union[FullNamespace, FlatNamespace]]:
-        yield self.local_namespace
-        yield self.global_namespace
-        yield self.packages
+        yield self.local_namespace  # local package
+        yield self.global_namespace  # root package
+        yield self.packages  # non-internal packages
         yield {
-            GLOBAL_PROJECT_NAME: self.global_project_namespace,
+            GLOBAL_PROJECT_NAME: self.global_project_namespace,  # dbt
         }
-        yield self.global_project_namespace
+        yield self.global_project_namespace  # other internal project besides dbt

     # provides special keys method for MacroNamespace iterator
     # returns keys from local_namespace, global_namespace, packages,
@@ -98,7 +102,9 @@ class MacroNamespaceBuilder:
         # internal packages comes from get_adapter_package_names
         self.internal_package_names = set(internal_packages)
         self.internal_package_names_order = internal_packages
-        # macro_func is added here if in root package
+        # macro_func is added here if in root package, since
+        # the root package acts as a "global" namespace, overriding
+        # everything else except local external package macro calls
         self.globals: FlatNamespace = {}
         # macro_func is added here if it's the package for this node
         self.locals: FlatNamespace = {}
@@ -169,8 +175,8 @@ class MacroNamespaceBuilder:
             global_project_namespace.update(self.internal_packages[pkg])

         return MacroNamespace(
-            global_namespace=self.globals,
-            local_namespace=self.locals,
-            global_project_namespace=global_project_namespace,
-            packages=self.packages,
+            global_namespace=self.globals,  # root package macros
+            local_namespace=self.locals,  # packages for *this* node
+            global_project_namespace=global_project_namespace,  # internal packages
+            packages=self.packages,  # non internal_packages
         )
@@ -62,6 +62,7 @@ class ManifestContext(ConfiguredContext):
         # keys in the manifest dictionary
         if isinstance(self.namespace, TestMacroNamespace):
             dct.update(self.namespace.local_namespace)
+            dct.update(self.namespace.project_namespace)
         else:
             dct.update(self.namespace)
         return dct
@@ -114,7 +114,8 @@ class BaseDatabaseWrapper:
         return search_prefixes

     def dispatch(
-        self, macro_name: str, packages: Optional[List[str]] = None
+        self, macro_name: str, packages: Optional[List[str]] = None,
+        macro_namespace: Optional[str] = None,
     ) -> MacroGenerator:
         search_packages: List[Optional[str]]

@@ -128,15 +129,22 @@ class BaseDatabaseWrapper:
             )
             raise CompilationException(msg)

-        if packages is None:
+        namespace = packages if packages else macro_namespace
+
+        if namespace is None:
             search_packages = [None]
-        elif isinstance(packages, str):
-            raise CompilationException(
-                f'In adapter.dispatch, got a string packages argument '
-                f'("{packages}"), but packages should be None or a list.'
-            )
+        elif isinstance(namespace, str):
+            search_packages = self._adapter.config.get_macro_search_order(namespace)
+            if not search_packages and namespace in self._adapter.config.dependencies:
+                search_packages = [namespace]
+            if not search_packages:
+                raise CompilationException(
+                    f'In adapter.dispatch, got a string packages argument '
+                    f'("{packages}"), but packages should be None or a list.'
+                )
         else:
-            search_packages = packages
+            # Not a string and not None so must be a list
+            search_packages = namespace

         attempts = []

@@ -1115,7 +1123,7 @@ class ProviderContext(ManifestContext):

     @contextproperty('model')
     def ctx_model(self) -> Dict[str, Any]:
-        return self.model.to_dict()
+        return self.model.to_dict(omit_none=True)

     @contextproperty
     def pre_hooks(self) -> Optional[List[Dict[str, Any]]]:
@@ -1231,7 +1239,7 @@ class ModelContext(ProviderContext):
         if isinstance(self.model, ParsedSourceDefinition):
             return []
         return [
-            h.to_dict() for h in self.model.config.pre_hook
+            h.to_dict(omit_none=True) for h in self.model.config.pre_hook
         ]

     @contextproperty
@@ -1239,7 +1247,7 @@ class ModelContext(ProviderContext):
         if isinstance(self.model, ParsedSourceDefinition):
             return []
         return [
-            h.to_dict() for h in self.model.config.post_hook
+            h.to_dict(omit_none=True) for h in self.model.config.post_hook
         ]

     @contextproperty
@@ -1408,7 +1416,12 @@ class TestContext(ProviderContext):
         self.macro_resolver = macro_resolver
         self.thread_ctx = MacroStack()
         super().__init__(model, config, manifest, provider, context_config)
-        self._build_test_namespace
+        self._build_test_namespace()
+        # We need to rebuild this because it's already been built by
+        # the ProviderContext with the wrong namespace.
+        self.db_wrapper = self.provider.DatabaseWrapper(
+            self.adapter, self.namespace
+        )

     def _build_namespace(self):
         return {}
@@ -1421,11 +1434,17 @@ class TestContext(ProviderContext):
         depends_on_macros = []
         if self.model.depends_on and self.model.depends_on.macros:
             depends_on_macros = self.model.depends_on.macros
+        lookup_macros = depends_on_macros.copy()
+        for macro_unique_id in lookup_macros:
+            lookup_macro = self.macro_resolver.macros.get(macro_unique_id)
+            if lookup_macro:
+                depends_on_macros.extend(lookup_macro.depends_on.macros)
+
         macro_namespace = TestMacroNamespace(
-            self.macro_resolver, self.ctx, self.node, self.thread_ctx,
+            self.macro_resolver, self._ctx, self.model, self.thread_ctx,
             depends_on_macros
         )
-        self._namespace = macro_namespace
+        self.namespace = macro_namespace


 def generate_test_context(
@@ -132,7 +132,7 @@ class Credentials(
     ) -> Iterable[Tuple[str, Any]]:
         """Return an ordered iterator of key/value pairs for pretty-printing.
         """
-        as_dict = self.to_dict(options={'keep_none': True})
+        as_dict = self.to_dict(omit_none=False)
         connection_keys = set(self._connection_keys())
         aliases: List[str] = []
         if with_aliases:
@@ -148,8 +148,8 @@ class Credentials(
         raise NotImplementedError

     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         data = cls.translate_aliases(data)
         return data

@@ -159,7 +159,7 @@ class Credentials(
     ) -> Dict[str, Any]:
         return translate_aliases(kwargs, cls._ALIASES, recurse)

-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         # no super() -- do we need it?
         if self._ALIASES:
             dct.update({
@@ -43,6 +43,7 @@ class CompiledNode(ParsedNode, CompiledNodeMixin):
     extra_ctes_injected: bool = False
     extra_ctes: List[InjectedCTE] = field(default_factory=list)
     relation_name: Optional[str] = None
+    _pre_injected_sql: Optional[str] = None

     def set_cte(self, cte_id: str, sql: str):
         """This is the equivalent of what self.extra_ctes[cte_id] = sql would
@@ -55,6 +56,12 @@ class CompiledNode(ParsedNode, CompiledNodeMixin):
         else:
             self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))

+    def __post_serialize__(self, dct):
+        dct = super().__post_serialize__(dct)
+        if '_pre_injected_sql' in dct:
+            del dct['_pre_injected_sql']
+        return dct
+

 @dataclass
 class CompiledAnalysisNode(CompiledNode):
@@ -178,7 +185,7 @@ def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
         raise ValueError('invalid resource_type: {}'
                          .format(compiled.resource_type))

-    return cls.from_dict(compiled.to_dict())
+    return cls.from_dict(compiled.to_dict(omit_none=True))


 NonSourceCompiledNode = Union[
@@ -240,7 +240,7 @@ def build_edges(nodes: List[ManifestNode]):


 def _deepcopy(value):
-    return value.from_dict(value.to_dict())
+    return value.from_dict(value.to_dict(omit_none=True))


 class Locality(enum.IntEnum):
@@ -564,11 +564,11 @@ class Manifest(MacroMethods):
         """
         self.flat_graph = {
             'nodes': {
-                k: v.to_dict(options={'keep_none': True})
+                k: v.to_dict(omit_none=False)
                 for k, v in self.nodes.items()
             },
             'sources': {
-                k: v.to_dict(options={'keep_none': True})
+                k: v.to_dict(omit_none=False)
                 for k, v in self.sources.items()
             }
         }
@@ -755,7 +755,7 @@ class Manifest(MacroMethods):

     # When 'to_dict' is called on the Manifest, it substitues a
     # WritableManifest
-    def __pre_serialize__(self, options=None):
+    def __pre_serialize__(self):
         return self.writable_manifest()

     def write(self, path):
@@ -307,7 +307,7 @@ class BaseConfig(
         """
         # sadly, this is a circular import
         from dbt.adapters.factory import get_config_class_by_name
-        dct = self.to_dict(options={'keep_none': True})
+        dct = self.to_dict(omit_none=False)

         adapter_config_cls = get_config_class_by_name(adapter_type)

@@ -326,12 +326,12 @@ class BaseConfig(
         return self.from_dict(dct)

     def finalize_and_validate(self: T) -> T:
-        dct = self.to_dict(options={'keep_none': True})
+        dct = self.to_dict(omit_none=False)
         self.validate(dct)
         return self.from_dict(dct)

     def replace(self, **kwargs):
-        dct = self.to_dict()
+        dct = self.to_dict(omit_none=True)

         mapping = self.field_mapping()
         for key, value in kwargs.items():
@@ -396,8 +396,8 @@ class NodeConfig(BaseConfig):
     full_refresh: Optional[bool] = None

     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         field_map = {'post-hook': 'post_hook', 'pre-hook': 'pre_hook'}
         # create a new dict because otherwise it gets overwritten in
         # tests
@@ -414,8 +414,8 @@ class NodeConfig(BaseConfig):
                 data[new_name] = data.pop(field_name)
         return data

-    def __post_serialize__(self, dct, options=None):
-        dct = super().__post_serialize__(dct, options=options)
+    def __post_serialize__(self, dct):
+        dct = super().__post_serialize__(dct)
         field_map = {'post_hook': 'post-hook', 'pre_hook': 'pre-hook'}
         for field_name in field_map:
             if field_name in dct:
@@ -480,7 +480,7 @@ class SnapshotConfig(EmptySnapshotConfig):
     # formerly supported with GenericSnapshotConfig

     def finalize_and_validate(self):
-        data = self.to_dict()
+        data = self.to_dict(omit_none=True)
         self.validate(data)
         return self.from_dict(data)

|||||||
@@ -99,8 +99,8 @@ class HasRelationMetadata(dbtClassMixin, Replaceable):
|
|||||||
# because it messes up the subclasses and default parameters
|
# because it messes up the subclasses and default parameters
|
||||||
# so hack it here
|
# so hack it here
|
||||||
@classmethod
|
@classmethod
|
||||||
def __pre_deserialize__(cls, data, options=None):
|
def __pre_deserialize__(cls, data):
|
||||||
data = super().__pre_deserialize__(data, options=options)
|
data = super().__pre_deserialize__(data)
|
||||||
if 'database' not in data:
|
if 'database' not in data:
|
||||||
data['database'] = None
|
data['database'] = None
|
||||||
return data
|
return data
|
||||||
@@ -141,7 +141,7 @@ class ParsedNodeMixins(dbtClassMixin):
|
|||||||
# Maybe there should be validation or restrictions
|
# Maybe there should be validation or restrictions
|
||||||
# elsewhere?
|
# elsewhere?
|
||||||
assert isinstance(self, dbtClassMixin)
|
assert isinstance(self, dbtClassMixin)
|
||||||
dct = self.to_dict(options={'keep_none': True})
|
dct = self.to_dict(omit_none=False)
|
||||||
self.validate(dct)
|
self.validate(dct)
|
||||||
|
|
||||||
def get_materialization(self):
|
def get_materialization(self):
|
||||||
@@ -454,7 +454,7 @@ class ParsedMacro(UnparsedBaseNode, HasUniqueID):
|
|||||||
if flags.STRICT_MODE:
|
if flags.STRICT_MODE:
|
||||||
# What does this actually validate?
|
# What does this actually validate?
|
||||||
assert isinstance(self, dbtClassMixin)
|
assert isinstance(self, dbtClassMixin)
|
||||||
dct = self.to_dict(options={'keep_none': True})
|
dct = self.to_dict(omit_none=False)
|
||||||
self.validate(dct)
|
self.validate(dct)
|
||||||
|
|
||||||
def same_contents(self, other: Optional['ParsedMacro']) -> bool:
|
def same_contents(self, other: Optional['ParsedMacro']) -> bool:
|
||||||
|
|||||||
@@ -231,12 +231,9 @@ class UnparsedSourceTableDefinition(HasColumnTests, HasTests):
|
|||||||
external: Optional[ExternalTable] = None
|
external: Optional[ExternalTable] = None
|
||||||
tags: List[str] = field(default_factory=list)
|
tags: List[str] = field(default_factory=list)
|
||||||
|
|
||||||
def __post_serialize__(self, dct, options=None):
|
def __post_serialize__(self, dct):
|
||||||
dct = super().__post_serialize__(dct)
|
dct = super().__post_serialize__(dct)
|
||||||
keep_none = False
|
if 'freshness' not in dct and self.freshness is None:
|
||||||
if options and 'keep_none' in options and options['keep_none']:
|
|
||||||
keep_none = True
|
|
||||||
if not keep_none and self.freshness is None:
|
|
||||||
dct['freshness'] = None
|
dct['freshness'] = None
|
||||||
return dct
|
return dct
|
||||||
|
|
||||||
@@ -261,12 +258,9 @@ class UnparsedSourceDefinition(dbtClassMixin, Replaceable):
|
|||||||
def yaml_key(self) -> 'str':
|
def yaml_key(self) -> 'str':
|
||||||
return 'sources'
|
return 'sources'
|
||||||
|
|
||||||
def __post_serialize__(self, dct, options=None):
|
def __post_serialize__(self, dct):
|
||||||
dct = super().__post_serialize__(dct)
|
dct = super().__post_serialize__(dct)
|
||||||
keep_none = False
|
if 'freshness' not in dct and self.freshness is None:
|
||||||
if options and 'keep_none' in options and options['keep_none']:
|
|
||||||
keep_none = True
|
|
||||||
if not keep_none and self.freshness is None:
|
|
||||||
dct['freshness'] = None
|
dct['freshness'] = None
|
||||||
return dct
|
return dct
|
||||||
|
|
||||||
@@ -290,7 +284,7 @@ class SourceTablePatch(dbtClassMixin):
|
|||||||
columns: Optional[Sequence[UnparsedColumn]] = None
|
columns: Optional[Sequence[UnparsedColumn]] = None
|
||||||
|
|
||||||
def to_patch_dict(self) -> Dict[str, Any]:
|
def to_patch_dict(self) -> Dict[str, Any]:
|
||||||
dct = self.to_dict()
|
dct = self.to_dict(omit_none=True)
|
||||||
remove_keys = ('name')
|
remove_keys = ('name')
|
||||||
for key in remove_keys:
|
for key in remove_keys:
|
||||||
if key in dct:
|
if key in dct:
|
||||||
@@ -327,7 +321,7 @@ class SourcePatch(dbtClassMixin, Replaceable):
|
|||||||
tags: Optional[List[str]] = None
|
tags: Optional[List[str]] = None
|
||||||
|
|
||||||
def to_patch_dict(self) -> Dict[str, Any]:
|
def to_patch_dict(self) -> Dict[str, Any]:
|
||||||
dct = self.to_dict()
|
dct = self.to_dict(omit_none=True)
|
||||||
remove_keys = ('name', 'overrides', 'tables', 'path')
|
remove_keys = ('name', 'overrides', 'tables', 'path')
|
||||||
for key in remove_keys:
|
for key in remove_keys:
|
||||||
if key in dct:
|
if key in dct:
|
||||||
|
|||||||
@@ -190,6 +190,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
on_run_start: Optional[List[str]] = field(default_factory=list_str)
on_run_end: Optional[List[str]] = field(default_factory=list_str)
require_dbt_version: Optional[Union[List[str], str]] = None
+dispatch: List[Dict[str, Any]] = field(default_factory=list)
models: Dict[str, Any] = field(default_factory=dict)
seeds: Dict[str, Any] = field(default_factory=dict)
snapshots: Dict[str, Any] = field(default_factory=dict)
@@ -211,6 +212,13 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
raise ValidationError(
f"Invalid project name: {data['name']} is a reserved word"
)
+# validate dispatch config
+if 'dispatch' in data and data['dispatch']:
+entries = data['dispatch']
+for entry in entries:
+if ('macro_namespace' not in entry or 'search_order' not in entry or
+not isinstance(entry['search_order'], list)):
+raise ValidationError(f"Invalid project dispatch config: {entry}")


@dataclass
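For reference, a dispatch entry only passes the new validation above when it carries `macro_namespace` and a list-valued `search_order`; the package names below are illustrative stand-ins for what a project-level `dispatch` config in `dbt_project.yml` would provide.

```python
# Hypothetical dispatch entries as they reach Project validation; only the
# shape matters for the check added above.
valid_entry = {
    'macro_namespace': 'dbt_utils',
    'search_order': ['my_project', 'dbt_utils'],
}
invalid_entry = {
    'macro_namespace': 'dbt_utils',
    'search_order': 'my_project',  # rejected: search_order must be a list
}
```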
@@ -52,7 +52,7 @@ class FakeAPIObject(dbtClassMixin, Replaceable, Mapping):
return len(fields(self.__class__))

def incorporate(self, **kwargs):
-value = self.to_dict()
+value = self.to_dict(omit_none=True)
value = deep_merge(value, kwargs)
return self.from_dict(value)

@@ -97,8 +97,8 @@ class BaseResult(dbtClassMixin):
message: Optional[Union[str, int]]

@classmethod
-def __pre_deserialize__(cls, data, options=None):
-data = super().__pre_deserialize__(data, options=options)
+def __pre_deserialize__(cls, data):
+data = super().__pre_deserialize__(data)
if 'message' not in data:
data['message'] = None
return data
@@ -206,7 +206,7 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
)

def write(self, path: str):
-write_json(path, self.to_dict(options={'keep_none': True}))
+write_json(path, self.to_dict(omit_none=False))


@dataclass
@@ -448,8 +448,8 @@ class CatalogResults(dbtClassMixin):
errors: Optional[List[str]] = None
_compile_results: Optional[Any] = None

-def __post_serialize__(self, dct, options=None):
-dct = super().__post_serialize__(dct, options=options)
+def __post_serialize__(self, dct):
+dct = super().__post_serialize__(dct)
if '_compile_results' in dct:
del dct['_compile_results']
return dct
@@ -38,8 +38,8 @@ class RPCParameters(dbtClassMixin):
timeout: Optional[float]

@classmethod
-def __pre_deserialize__(cls, data, options=None):
-data = super().__pre_deserialize__(data, options=options)
+def __pre_deserialize__(cls, data, omit_none=True):
+data = super().__pre_deserialize__(data)
if 'timeout' not in data:
data['timeout'] = None
if 'task_tags' not in data:
@@ -428,8 +428,8 @@ class TaskTiming(dbtClassMixin):
# These ought to be defaults but superclass order doesn't
# allow that to work
@classmethod
-def __pre_deserialize__(cls, data, options=None):
-data = super().__pre_deserialize__(data, options=options)
+def __pre_deserialize__(cls, data):
+data = super().__pre_deserialize__(data)
for field_name in ('start', 'end', 'elapsed'):
if field_name not in data:
data[field_name] = None
@@ -496,8 +496,8 @@ class PollResult(RemoteResult, TaskTiming):
# These ought to be defaults but superclass order doesn't
# allow that to work
@classmethod
-def __pre_deserialize__(cls, data, options=None):
-data = super().__pre_deserialize__(data, options=options)
+def __pre_deserialize__(cls, data):
+data = super().__pre_deserialize__(data)
for field_name in ('start', 'end', 'elapsed'):
if field_name not in data:
data[field_name] = None
@@ -58,7 +58,7 @@ class Mergeable(Replaceable):
class Writable:
def write(self, path: str):
write_json(
-path, self.to_dict(options={'keep_none': True})  # type: ignore
+path, self.to_dict(omit_none=False)  # type: ignore
)

@@ -74,7 +74,7 @@ class AdditionalPropertiesMixin:
# not in the class definitions and puts them in an
# _extra dict in the class
@classmethod
-def __pre_deserialize__(cls, data, options=None):
+def __pre_deserialize__(cls, data):
# dir() did not work because fields with
# metadata settings are not found
# The original version of this would create the
@@ -93,18 +93,18 @@ class AdditionalPropertiesMixin:
else:
new_dict[key] = value
data = new_dict
-data = super().__pre_deserialize__(data, options=options)
+data = super().__pre_deserialize__(data)
return data

-def __post_serialize__(self, dct, options=None):
-data = super().__post_serialize__(dct, options=options)
+def __post_serialize__(self, dct):
+data = super().__post_serialize__(dct)
data.update(self.extra)
if '_extra' in data:
del data['_extra']
return data

def replace(self, **kwargs):
-dct = self.to_dict(options={'keep_none': True})
+dct = self.to_dict(omit_none=False)
dct.update(kwargs)
return self.from_dict(dct)

@@ -1,5 +1,5 @@
from typing import (
-Type, ClassVar, Dict, cast, TypeVar
+Type, ClassVar, cast,
)
import re
from dataclasses import fields
@@ -9,29 +9,28 @@ from dateutil.parser import parse

from hologram import JsonSchemaMixin, FieldEncoder, ValidationError

+# type: ignore
from mashumaro import DataClassDictMixin
-from mashumaro.types import SerializableEncoder, SerializableType
+from mashumaro.config import (
+TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig as MashBaseConfig
+)
+from mashumaro.types import SerializableType, SerializationStrategy


-class DateTimeSerializableEncoder(SerializableEncoder[datetime]):
-@classmethod
-def _serialize(cls, value: datetime) -> str:
+class DateTimeSerialization(SerializationStrategy):
+def serialize(self, value):
out = value.isoformat()
# Assume UTC if timezone is missing
if value.tzinfo is None:
out = out + "Z"
return out

-@classmethod
-def _deserialize(cls, value: str) -> datetime:
+def deserialize(self, value):
return (
value if isinstance(value, datetime) else parse(cast(str, value))
)


-TV = TypeVar("TV")


# This class pulls in both JsonSchemaMixin from Hologram and
# DataClassDictMixin from our fork of Mashumaro. The 'to_dict'
# and 'from_dict' methods come from Mashumaro. Building
@@ -43,23 +42,21 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
against the schema
"""

-_serializable_encoders: ClassVar[Dict[str, SerializableEncoder]] = {
-'datetime.datetime': DateTimeSerializableEncoder(),
-}
+class Config(MashBaseConfig):
+code_generation_options = [
+TO_DICT_ADD_OMIT_NONE_FLAG,
+]
+serialization_strategy = {
+datetime: DateTimeSerialization(),
+}

_hyphenated: ClassVar[bool] = False
ADDITIONAL_PROPERTIES: ClassVar[bool] = False

# This is called by the mashumaro to_dict in order to handle
# nested classes.
# Munges the dict that's returned.
-def __post_serialize__(self, dct, options=None):
-keep_none = False
-if options and 'keep_none' in options and options['keep_none']:
-keep_none = True
-if not keep_none: # remove attributes that are None
-new_dict = {k: v for k, v in dct.items() if v is not None}
-dct = new_dict
+def __post_serialize__(self, dct):

if self._hyphenated:
new_dict = {}
for key in dct:
@@ -75,7 +72,7 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
# This is called by the mashumaro _from_dict method, before
# performing the conversion to a dict
@classmethod
-def __pre_deserialize__(cls, data, options=None):
+def __pre_deserialize__(cls, data):
if cls._hyphenated:
new_dict = {}
for key in data:
@@ -92,8 +89,8 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
# hologram and in mashumaro.
def _local_to_dict(self, **kwargs):
args = {}
-if 'omit_none' in kwargs and kwargs['omit_none'] is False:
-args['options'] = {'keep_none': True}
+if 'omit_none' in kwargs:
+args['omit_none'] = kwargs['omit_none']
return self.to_dict(**args)

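The hunks above move dbt onto upstream mashumaro's config-based API: `TO_DICT_ADD_OMIT_NONE_FLAG` makes the generated `to_dict` accept an `omit_none` keyword, and `serialization_strategy` maps a type to a strategy object. A standalone sketch of that API, using an invented `Event` class rather than anything from the diff:

```python
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig, TO_DICT_ADD_OMIT_NONE_FLAG
from mashumaro.types import SerializationStrategy


class IsoDatetime(SerializationStrategy):
    def serialize(self, value: datetime) -> str:
        return value.isoformat()

    def deserialize(self, value: str) -> datetime:
        return datetime.fromisoformat(value)


@dataclass
class Event(DataClassDictMixin):
    name: str
    at: Optional[datetime] = None

    class Config(BaseConfig):
        # generates to_dict(..., omit_none=...) support
        code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]
        # applies the strategy to every datetime field
        serialization_strategy = {datetime: IsoDatetime()}


Event(name="run").to_dict(omit_none=True)   # {'name': 'run'}
Event(name="run").to_dict(omit_none=False)  # {'name': 'run', 'at': None}
```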
@@ -95,7 +95,8 @@ class JsonFormatter(LogMessageFormatter):
# utils imports exceptions which imports logger...
import dbt.utils
log_message = super().__call__(record, handler)
-return json.dumps(log_message.to_dict(), cls=dbt.utils.JSONEncoder)
+dct = log_message.to_dict(omit_none=True)
+return json.dumps(dct, cls=dbt.utils.JSONEncoder)


class FormatterMixin:
@@ -127,6 +128,7 @@ class OutputHandler(logbook.StreamHandler, FormatterMixin):
The `format_string` parameter only changes the default text output, not
debug mode or json.
"""
+
def __init__(
self,
stream,
@@ -220,7 +222,8 @@ class TimingProcessor(logbook.Processor):

def process(self, record):
if self.timing_info is not None:
-record.extra['timing_info'] = self.timing_info.to_dict()
+record.extra['timing_info'] = self.timing_info.to_dict(
+omit_none=True)


class DbtProcessState(logbook.Processor):
@@ -349,6 +352,7 @@ def make_log_dir_if_missing(log_dir):
class DebugWarnings(logbook.compat.redirected_warnings):
"""Log warnings, except send them to 'debug' instead of 'warning' level.
"""
+
def make_record(self, message, exception, filename, lineno):
rv = super().make_record(message, exception, filename, lineno)
rv.level = logbook.DEBUG
@@ -252,7 +252,7 @@ class ConfiguredParser(
'raw_sql': block.contents,
'unique_id': self.generate_unique_id(name),
'config': self.config_dict(config),
-'checksum': block.file.checksum.to_dict(),
+'checksum': block.file.checksum.to_dict(omit_none=True),
}
dct.update(kwargs)
try:
@@ -301,7 +301,7 @@ class ConfiguredParser(
self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
) -> None:
# Overwrite node config
-final_config_dict = parsed_node.config.to_dict()
+final_config_dict = parsed_node.config.to_dict(omit_none=True)
final_config_dict.update(config_dict)
# re-mangle hooks, in case we got new ones
self._mangle_hooks(final_config_dict)
@@ -12,15 +12,21 @@ import dbt.tracking
import dbt.flags as flags

from dbt.adapters.factory import (
+get_adapter,
get_relation_class_by_name,
+get_adapter_package_names,
)
from dbt.helper_types import PathSet
from dbt.logger import GLOBAL_LOGGER as logger, DbtProcessState
from dbt.node_types import NodeType
-from dbt.clients.jinja import get_rendered
+from dbt.clients.jinja import get_rendered, MacroStack
+from dbt.clients.jinja_static import statically_extract_macro_calls
from dbt.clients.system import make_directory
from dbt.config import Project, RuntimeConfig
from dbt.context.docs import generate_runtime_docs
+from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace
+from dbt.context.configured import generate_macro_context
+from dbt.context.providers import ParseProvider
from dbt.contracts.files import FilePath, FileHash
from dbt.contracts.graph.compiled import ManifestNode
from dbt.contracts.graph.manifest import (
@@ -257,6 +263,40 @@ class ManifestLoader:
self._perf_info.path_count + total_path_count
)

+# Loop through macros in the manifest and statically parse
+# the 'macro_sql' to find depends_on.macros
+def reparse_macros(self, macros):
+internal_package_names = get_adapter_package_names(
+self.root_project.credentials.type
+)
+macro_resolver = MacroResolver(
+macros,
+self.root_project.project_name,
+internal_package_names
+)
+macro_ctx = generate_macro_context(self.root_project)
+macro_namespace = TestMacroNamespace(
+macro_resolver, {}, None, MacroStack(), []
+)
+adapter = get_adapter(self.root_project)
+db_wrapper = ParseProvider().DatabaseWrapper(
+adapter, macro_namespace
+)
+for macro in macros.values():
+possible_macro_calls = statically_extract_macro_calls(
+macro.macro_sql, macro_ctx, db_wrapper)
+for macro_name in possible_macro_calls:
+# adapter.dispatch calls can generate a call with the same name as the macro
+# it ought to be an adapter prefix (postgres_) or default_
+if macro_name == macro.name:
+continue
+package_name = macro.package_name
+if '.' in macro_name:
+package_name, macro_name = macro_name.split('.')
+dep_macro_id = macro_resolver.get_macro_id(package_name, macro_name)
+if dep_macro_id:
+macro.depends_on.add_macro(dep_macro_id) # will check for dupes
+
def load_only_macros(self) -> MacroManifest:
old_results = self.read_parse_results()

@@ -270,6 +310,7 @@ class ManifestLoader:
macros=self.results.macros,
files=self.results.files
)
+
self.macro_hook(macro_manifest)
return macro_manifest

@@ -282,6 +323,7 @@ class ManifestLoader:
# store the macros & files from the adapter macro manifest
self.results.macros.update(macro_manifest.macros)
self.results.files.update(macro_manifest.files)
+self.reparse_macros(self.results.macros)

start_timer = time.perf_counter()

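For orientation, a rough sketch (not from the diff) of the shape `reparse_macros` works with: the static extractor is internal to dbt, so the sample names and return value here are assumptions, but the loop above expects bare or package-qualified macro names that it splits on `.` before resolving.

```python
# Illustrative only: stands in for statically_extract_macro_calls(...) output.
possible_macro_calls = ['default__current_timestamp', 'dbt_utils.dateadd']

for macro_name in possible_macro_calls:
    package_name = 'my_package'            # stands in for macro.package_name
    if '.' in macro_name:
        package_name, macro_name = macro_name.split('.')
    print(package_name, macro_name)        # resolved via MacroResolver in dbt
```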
@@ -385,7 +385,7 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
'config': self.config_dict(config),
'test_metadata': test_metadata,
'column_name': column_name,
-'checksum': FileHash.empty().to_dict(),
+'checksum': FileHash.empty().to_dict(omit_none=True),
}
try:
ParsedSchemaTestNode.validate(dct)
@@ -68,7 +68,8 @@ class SnapshotParser(

def transform(self, node: IntermediateSnapshotNode) -> ParsedSnapshotNode:
try:
-parsed_node = ParsedSnapshotNode.from_dict(node.to_dict())
+dct = node.to_dict(omit_none=True)
+parsed_node = ParsedSnapshotNode.from_dict(dct)
self.set_snapshot_attributes(parsed_node)
return parsed_node
except ValidationError as exc:
@@ -49,8 +49,8 @@ class SourcePatcher:
if patch is None:
return unpatched

-source_dct = unpatched.source.to_dict()
-table_dct = unpatched.table.to_dict()
+source_dct = unpatched.source.to_dict(omit_none=True)
+table_dct = unpatched.table.to_dict(omit_none=True)
patch_path: Optional[Path] = None

source_table_patch: Optional[SourceTablePatch] = None
@@ -177,7 +177,7 @@ def poll_complete(


def _dict_logs(logs: List[LogMessage]) -> List[Dict[str, Any]]:
-return [log.to_dict() for log in logs]
+return [log.to_dict(omit_none=True) for log in logs]


class Poll(RemoteBuiltinMethod[PollParameters, PollResult]):
@@ -97,7 +97,7 @@ class ResponseManager(JSONRPCResponseManager):
# Note: errors in to_dict do not show up anywhere in
# the output and all you get is a generic 500 error
output.result = \
-output.result.to_dict(options={'keep_none': True})
+output.result.to_dict(omit_none=False)
yield output

@classmethod
@@ -391,7 +391,7 @@ class RequestTaskHandler(threading.Thread, TaskHandlerProtocol):
except RPCException as exc:
# RPC Exceptions come already preserialized for the jsonrpc
# framework
-exc.logs = [log.to_dict() for log in self.logs]
+exc.logs = [log.to_dict(omit_none=True) for log in self.logs]
exc.tags = self.tags
raise

@@ -114,8 +114,8 @@ class Catalog(Dict[CatalogKey, CatalogTable]):
if unique_id in sources:
dbt.exceptions.raise_ambiguous_catalog_match(
unique_id,
-sources[unique_id].to_dict(),
-table.to_dict(),
+sources[unique_id].to_dict(omit_none=True),
+table.to_dict(omit_none=True),
)
else:
sources[unique_id] = table.replace(unique_id=unique_id)
@@ -110,7 +110,7 @@ class ListTask(GraphRunnableTask):
for node in self._iterate_selected_nodes():
yield json.dumps({
k: v
-for k, v in node.to_dict(options={'keep_none': True}).items()
+for k, v in node.to_dict(omit_none=False).items()
if k in self.ALLOWED_KEYS
})

@@ -169,7 +169,7 @@ def print_snapshot_result_line(

info, status, logger_fn = get_printable_result(
result, 'snapshotted', 'snapshotting')
-cfg = model.config.to_dict()
+cfg = model.config.to_dict(omit_none=True)

msg = "{info} {description}".format(
info=info, description=description, **cfg)
@@ -117,7 +117,7 @@ def track_model_run(index, num_nodes, run_model_result):
"hashed_contents": utils.get_hashed_contents(
run_model_result.node
),
-"timing": [t.to_dict() for t in run_model_result.timing],
+"timing": [t.to_dict(omit_none=True) for t in run_model_result.timing],
})


@@ -193,7 +193,7 @@ class ModelRunner(CompileRunner):
result = context['load_result']('main')
adapter_response = {}
if isinstance(result.response, dbtClassMixin):
-adapter_response = result.response.to_dict()
+adapter_response = result.response.to_dict(omit_none=True)
return RunResult(
node=model,
status=RunStatus.Success,
@@ -29,6 +29,7 @@ PACKAGE_INSTALL_SPEC = 'iglu:com.dbt/package_install/jsonschema/1-0-0'
RPC_REQUEST_SPEC = 'iglu:com.dbt/rpc_request/jsonschema/1-0-1'
DEPRECATION_WARN_SPEC = 'iglu:com.dbt/deprecation_warn/jsonschema/1-0-0'
LOAD_ALL_TIMING_SPEC = 'iglu:com.dbt/load_all_timing/jsonschema/1-0-0'
+RESOURCE_COUNTS = 'iglu:com.dbt/resource_counts/jsonschema/1-0-0'

DBT_INVOCATION_ENV = 'DBT_INVOCATION_ENV'

@@ -289,6 +290,20 @@ def track_project_load(options):
)


+def track_resource_counts(resource_counts):
+context = [SelfDescribingJson(RESOURCE_COUNTS, resource_counts)]
+assert active_user is not None, \
+'Cannot track resource counts when active user is None'
+
+track(
+active_user,
+category='dbt',
+action='resource_counts',
+label=active_user.invocation_id,
+context=context
+)
+
+
def track_model_run(options):
context = [SelfDescribingJson(RUN_MODEL_SPEC, options)]
assert active_user is not None, \
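A hypothetical call site for the new tracking helper above; the keys in the mapping are illustrative, since the function simply forwards whatever mapping it receives as event context.

```python
# Example values only; dbt would supply real per-resource-type counts.
resource_counts = {'models': 40, 'tests': 12, 'snapshots': 2, 'sources': 5}
track_resource_counts(resource_counts)
```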
@@ -320,7 +320,7 @@ class JSONEncoder(json.JSONEncoder):
if hasattr(obj, 'to_dict'):
# if we have a to_dict we should try to serialize the result of
# that!
-return obj.to_dict()
+return obj.to_dict(omit_none=True)
return super().default(obj)


@@ -96,5 +96,5 @@ def _get_dbt_plugins_info():
yield plugin_name, mod.version


-__version__ = '0.19.1b2'
+__version__ = '0.19.2'
installed = get_installed_version()
@@ -1,201 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-[... the remaining standard Apache License 2.0 terms (Sections 1-9 and the appendix, Copyright 2017 Alexander Tikhonov), removed together with the vendored mashumaro package ...]
@@ -1,15 +0,0 @@
-from mashumaro.exceptions import MissingField
-from mashumaro.helper import field_options
-from mashumaro.serializer.base.dict import DataClassDictMixin
-from mashumaro.serializer.json import DataClassJSONMixin
-from mashumaro.serializer.msgpack import DataClassMessagePackMixin
-from mashumaro.serializer.yaml import DataClassYAMLMixin
-
-__all__ = [
-"MissingField",
-"DataClassDictMixin",
-"DataClassJSONMixin",
-"DataClassMessagePackMixin",
-"DataClassYAMLMixin",
-"field_options",
-]
@@ -1,84 +0,0 @@
-from mashumaro.meta.helpers import type_name
-
-
-class MissingField(LookupError):
-def __init__(self, field_name, field_type, holder_class):
-self.field_name = field_name
-self.field_type = field_type
-self.holder_class = holder_class
-
-@property
-def field_type_name(self):
-return type_name(self.field_type)
-
-@property
-def holder_class_name(self):
-return type_name(self.holder_class)
-
-def __str__(self):
-return (
-f'Field "{self.field_name}" of type {self.field_type_name}'
-f" is missing in {self.holder_class_name} instance"
-)
-
-
-class UnserializableDataError(TypeError):
-pass
-
-
-class UnserializableField(UnserializableDataError):
-def __init__(self, field_name, field_type, holder_class, msg=None):
-self.field_name = field_name
-self.field_type = field_type
-self.holder_class = holder_class
-self.msg = msg
-
-@property
-def field_type_name(self):
-return type_name(self.field_type)
-
-@property
-def holder_class_name(self):
-return type_name(self.holder_class)
-
-def __str__(self):
-s = (
-f'Field "{self.field_name}" of type {self.field_type_name} '
-f"in {self.holder_class_name} is not serializable"
-)
-if self.msg:
-s += f": {self.msg}"
-return s
-
-
-class InvalidFieldValue(ValueError):
-def __init__(
-self, field_name, field_type, field_value, holder_class, msg=None
-):
-self.field_name = field_name
-self.field_type = field_type
-self.field_value = field_value
-self.holder_class = holder_class
-self.msg = msg
-
-@property
-def field_type_name(self):
-return type_name(self.field_type)
-
-@property
-def holder_class_name(self):
-return type_name(self.holder_class)
-
-def __str__(self):
-s = (
-f'Field "{self.field_name}" of type {self.field_type_name} '
-f"in {self.holder_class_name} has invalid value "
-f"{repr(self.field_value)}"
-)
-if self.msg:
-s += f": {self.msg}"
-return s
-
-
-class BadHookSignature(TypeError):
-pass
@@ -1,11 +0,0 @@
-from typing import Any, Callable, Optional, Union
-
-
-def field_options(
-serialize: Optional[Callable[[Any], Any]] = None,
-deserialize: Optional[Union[str, Callable[[Any], Any]]] = None,
-):
-return {"serialize": serialize, "deserialize": deserialize}
-
-
-__all__ = ["field_options"]
@@ -1,105 +0,0 @@
-import dataclasses
-import types
-import typing
-
-from .macros import PY_36, PY_37, PY_38, PY_39
-
-
-def get_imported_module_names():
-# noinspection PyUnresolvedReferences
-return {
-value.__name__
-for value in globals().values()
-if isinstance(value, types.ModuleType)
-}
-
-
-def get_type_origin(t):
-try:
-if PY_36:
-return t.__extra__
-elif PY_37 or PY_38 or PY_39:
-return t.__origin__
-except AttributeError:
-return t
-
-
-def type_name(t):
-if is_generic(t):
-return str(t)
-else:
-try:
-return f"{t.__module__}.{t.__name__}"
-except AttributeError:
-return str(t)
-
-
-def is_special_typing_primitive(t):
-try:
-issubclass(t, object)
-return False
-except TypeError:
-return True
-
-
-def is_generic(t):
-if PY_37 or PY_38 or PY_39:
-# noinspection PyProtectedMember
-# noinspection PyUnresolvedReferences
-return t.__class__ is typing._GenericAlias
-elif PY_36:
-# noinspection PyUnresolvedReferences
-return issubclass(t.__class__, typing.GenericMeta)
-else:
-raise NotImplementedError
-
-
-def is_union(t):
-try:
-return t.__origin__ is typing.Union
-except AttributeError:
-return False
-
-
-def is_type_var(t):
-return hasattr(t, "__constraints__")
-
-
-def is_class_var(t):
-if PY_36:
-return (
-is_special_typing_primitive(t) and type(t).__name__ == "_ClassVar"
-)
-if PY_37 or PY_38 or PY_39:
-return get_type_origin(t) is typing.ClassVar
-else:
-raise NotImplementedError
-
-
-def is_init_var(t):
-if PY_36 or PY_37:
-return get_type_origin(t) is dataclasses.InitVar
-elif PY_38 or PY_39:
-return isinstance(t, dataclasses.InitVar)
-else:
-raise NotImplementedError
-
-
-def get_class_that_define_method(method_name, cls):
-for cls in cls.__mro__:
-if method_name in cls.__dict__:
-return cls
-
-
-__all__ = [
-"get_imported_module_names",
-"get_type_origin",
-"type_name",
-"is_special_typing_primitive",
-"is_generic",
-"is_union",
-"is_type_var",
-"is_class_var",
-"is_init_var",
-"get_class_that_define_method",
-]
@@ -1,9 +0,0 @@
-import sys
-
-PY_36 = sys.version_info.major == 3 and sys.version_info.minor == 6
-PY_37 = sys.version_info.major == 3 and sys.version_info.minor == 7
-PY_38 = sys.version_info.major == 3 and sys.version_info.minor == 8
-PY_39 = sys.version_info.major == 3 and sys.version_info.minor == 9
-
-
-__all__ = ["PY_36", "PY_37", "PY_38", "PY_39"]
@@ -1,13 +0,0 @@
-from .macros import PY_36
-
-
-def patch_fromisoformat():
-if PY_36:
-# noinspection PyPackageRequirements
-# noinspection PyUnresolvedReferences
-from backports.datetime_fromisoformat import MonkeyPatch
-
-MonkeyPatch.patch_fromisoformat()
-
-
-__all__ = ["patch_fromisoformat"]
@@ -1,3 +0,0 @@
-from .dict import DataClassDictMixin
-
-__all__ = ["DataClassDictMixin"]
@@ -1,61 +0,0 @@
-# this file has been modified for use with dbt
-from typing import Any, Dict, Mapping, Type, TypeVar, Optional, ClassVar
-
-from mashumaro.serializer.base.metaprogramming import CodeBuilder
-from mashumaro.types import SerializableEncoder
-
-T = TypeVar("T", bound="DataClassDictMixin")
-
-
-class DataClassDictMixin:
-_serializable_encoders: ClassVar[Dict[Type, SerializableEncoder]] = {}
-
-def __init_subclass__(cls: Type[T], **kwargs):
-builder = CodeBuilder(cls)
-exc = None
-try:
-builder.add_from_dict()
-except Exception as e:
-exc = e
-try:
-builder.add_to_dict()
-except Exception as e:
-exc = e
-if exc:
-raise exc
-
-def to_dict(
-self: T,
-use_bytes: bool = False,
-use_enum: bool = False,
-use_datetime: bool = False,
-options: Optional[Dict[str, Any]] = None,
-) -> dict:
-...
-
-@classmethod
-def from_dict(
-cls: Type[T],
-d: Mapping,
-use_bytes: bool = False,
-use_enum: bool = False,
-use_datetime: bool = False,
-options: Optional[Dict[str, Any]] = None,
-) -> T:
-...
-
-@classmethod
-def __pre_deserialize__(cls: Type[T], d: Dict[Any, Any], options: Optional[Dict[str, Any]]) -> Dict[Any, Any]:
-...
-
-@classmethod
-def __post_deserialize__(cls: Type[T], obj: T, options: Optional[Dict[str, Any]]) -> T:
-...
-
-def __pre_serialize__(self: T, options: Optional[Dict[str, Any]]) -> T:
-...
-
-def __post_serialize__(self: T, d: Dict[Any, Any], options: Optional[Dict[str, Any]]) -> Dict[Any, Any]:
-...
-
-__all__ = ["DataClassDictMixin"]
@@ -1,26 +0,0 @@
import datetime
import re


def parse_timezone(s: str):
    regexp = re.compile(r"^UTC(([+-][0-2][0-9]):([0-5][0-9]))?$")
    match = regexp.match(s)
    if not match:
        raise ValueError(
            f"Time zone {s} must be either UTC " f"or in format UTC[+-]hh:mm"
        )
    if match.group(1):
        hours = int(match.group(2))
        minutes = int(match.group(3))
        return datetime.timezone(
            datetime.timedelta(
                hours=hours, minutes=minutes if hours >= 0 else -minutes
            )
        )
    else:
        return datetime.timezone.utc


__all__ = [
    "parse_timezone",
]
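(Aside, not part of the diff: a quick sketch of the removed parse_timezone helper's behaviour, assuming the function above is in scope.)

import datetime

assert parse_timezone("UTC") is datetime.timezone.utc  # bare "UTC" maps to the stdlib constant

tz = parse_timezone("UTC+05:30")  # UTC[+-]hh:mm form
print(tz.utcoffset(None))         # 5:30:00

try:
    parse_timezone("GMT+01:00")   # anything else raises ValueError
except ValueError as exc:
    print(exc)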
@@ -1,830 +0,0 @@
# this file as been modified for use with dbt
# noinspection PyUnresolvedReferences
import builtins # noqa
import collections
import collections.abc
import datetime
import enum
import ipaddress
import os
import pathlib
import typing
import uuid

# noinspection PyUnresolvedReferences
from base64 import decodebytes, encodebytes # noqa
from contextlib import contextmanager, suppress

# noinspection PyProtectedMember
from dataclasses import _FIELDS, MISSING, Field, is_dataclass
from decimal import Decimal
from fractions import Fraction

# noinspection PyUnresolvedReferences
from mashumaro.exceptions import ( # noqa
    BadHookSignature,
    InvalidFieldValue,
    MissingField,
    UnserializableDataError,
    UnserializableField,
)
from mashumaro.meta.helpers import (
    get_class_that_define_method,
    get_imported_module_names,
    get_type_origin,
    is_class_var,
    is_generic,
    is_init_var,
    is_special_typing_primitive,
    is_type_var,
    is_union,
    type_name,
)
from mashumaro.meta.patch import patch_fromisoformat
from mashumaro.serializer.base.helpers import * # noqa
from mashumaro.types import SerializableType, SerializationStrategy

patch_fromisoformat()


NoneType = type(None)
INITIAL_MODULES = get_imported_module_names()


__PRE_SERIALIZE__ = "__pre_serialize__"
__PRE_DESERIALIZE__ = "__pre_deserialize__"
__POST_SERIALIZE__ = "__post_serialize__"
__POST_DESERIALIZE__ = "__post_deserialize__"

DataClassDictMixinPath = "mashumaro.serializer.base.dict.DataClassDictMixin"


class CodeBuilder:
    def __init__(self, cls):
        self.cls = cls
        self.lines: typing.List[str] = []
        self.modules: typing.Set[str] = set()
        self.globals: typing.Set[str] = set()
        self._current_indent: str = ""

    def reset(self) -> None:
        self.lines = []
        self.modules = INITIAL_MODULES.copy()
        self.globals = set()
        self._current_indent = ""

    @property
    def namespace(self) -> typing.Dict[typing.Any, typing.Any]:
        return self.cls.__dict__

    @property
    def annotations(self) -> typing.Dict[str, typing.Any]:
        return self.namespace.get("__annotations__", {})

    def __get_field_types(
        self, recursive=True
    ) -> typing.Dict[str, typing.Any]:
        fields = {}
        for fname, ftype in typing.get_type_hints(self.cls).items():
            if is_class_var(ftype) or is_init_var(ftype):
                continue
            if recursive or fname in self.annotations:
                fields[fname] = ftype
        return fields

    @property
    def field_types(self) -> typing.Dict[str, typing.Any]:
        return self.__get_field_types()

    @property
    def defaults(self) -> typing.Dict[str, typing.Any]:
        d = {}
        for ancestor in self.cls.__mro__[-1:0:-1]:
            if is_dataclass(ancestor):
                for field in getattr(ancestor, _FIELDS).values():
                    if field.default is not MISSING:
                        d[field.name] = field.default
                    else:
                        d[field.name] = field.default_factory
        for name in self.__get_field_types(recursive=False):
            field = self.namespace.get(name, MISSING)
            if isinstance(field, Field):
                if field.default is not MISSING:
                    d[name] = field.default
                else:
                    # https://github.com/python/mypy/issues/6910
                    d[name] = field.default_factory # type: ignore
            else:
                d[name] = field
        return d

    @property
    def metadatas(self) -> typing.Dict[str, typing.Mapping[str, typing.Any]]:
        d = {}
        for ancestor in self.cls.__mro__[-1:0:-1]:
            if is_dataclass(ancestor):
                for field in getattr(ancestor, _FIELDS).values():
                    d[field.name] = field.metadata
        for name in self.__get_field_types(recursive=False):
            field = self.namespace.get(name, MISSING)
            if isinstance(field, Field):
                d[name] = field.metadata
        return d

    def _add_type_modules(self, *types_) -> None:
        for t in types_:
            module = getattr(t, "__module__", None)
            if not module:
                return
            self.ensure_module_imported(module)
            args = getattr(t, "__args__", ())
            if args:
                self._add_type_modules(*args)
            constraints = getattr(t, "__constraints__", ())
            if constraints:
                self._add_type_modules(*constraints)

    def ensure_module_imported(self, module: str) -> None:
        if module not in self.modules:
            self.modules.add(module)
            self.add_line(f"if '{module}' not in globals():")
            with self.indent():
                self.add_line(f"import {module}")
            root_module = module.split(".")[0]
            if root_module not in self.globals:
                self.globals.add(root_module)
                self.add_line("else:")
                with self.indent():
                    self.add_line(f"global {root_module}")

    def add_line(self, line) -> None:
        self.lines.append(f"{self._current_indent}{line}")

    @contextmanager
    def indent(self) -> typing.Generator[None, None, None]:
        self._current_indent += " " * 4
        try:
            yield
        finally:
            self._current_indent = self._current_indent[:-4]

    def compile(self) -> None:
        exec("\n".join(self.lines), globals(), self.__dict__)

    def get_declared_hook(self, method_name: str):
        if not hasattr(self.cls, method_name):
            return
        cls = get_class_that_define_method(method_name, self.cls)
        if type_name(cls) != DataClassDictMixinPath:
            return cls.__dict__[method_name]

def add_from_dict(self) -> None:
|
|
||||||
|
|
||||||
self.reset()
|
|
||||||
self.add_line("@classmethod")
|
|
||||||
self.add_line(
|
|
||||||
"def from_dict(cls, d, use_bytes=False, use_enum=False, "
|
|
||||||
"use_datetime=False, options=None):"
|
|
||||||
)
|
|
||||||
with self.indent():
|
|
||||||
pre_deserialize = self.get_declared_hook(__PRE_DESERIALIZE__)
|
|
||||||
if pre_deserialize:
|
|
||||||
if not isinstance(pre_deserialize, classmethod):
|
|
||||||
raise BadHookSignature(
|
|
||||||
f"`{__PRE_DESERIALIZE__}` must be a class method with "
|
|
||||||
f"Callable[[Dict[Any, Any]], Dict[Any, Any]] signature"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.add_line(f"d = cls.{__PRE_DESERIALIZE__}(d, options=options)")
|
|
||||||
self.add_line("try:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line("kwargs = {}")
|
|
||||||
for fname, ftype in self.field_types.items():
|
|
||||||
metadata = self.metadatas.get(fname)
|
|
||||||
self._add_type_modules(ftype)
|
|
||||||
self.add_line(f"value = d.get('{fname}', MISSING)")
|
|
||||||
self.add_line("if value is None:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(f"kwargs['{fname}'] = None")
|
|
||||||
self.add_line("else:")
|
|
||||||
with self.indent():
|
|
||||||
if self.defaults[fname] is MISSING:
|
|
||||||
self.add_line("if value is MISSING:")
|
|
||||||
with self.indent():
|
|
||||||
if isinstance(ftype, SerializationStrategy):
|
|
||||||
self.add_line(
|
|
||||||
f"raise MissingField('{fname}',"
|
|
||||||
f"{type_name(ftype.__class__)},cls)"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.add_line(
|
|
||||||
f"raise MissingField('{fname}',"
|
|
||||||
f"{type_name(ftype)},cls)"
|
|
||||||
)
|
|
||||||
self.add_line("else:")
|
|
||||||
with self.indent():
|
|
||||||
unpacked_value = self._unpack_field_value(
|
|
||||||
fname=fname,
|
|
||||||
ftype=ftype,
|
|
||||||
parent=self.cls,
|
|
||||||
metadata=metadata,
|
|
||||||
)
|
|
||||||
self.add_line("try:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(
|
|
||||||
f"kwargs['{fname}'] = {unpacked_value}"
|
|
||||||
)
|
|
||||||
self.add_line("except Exception as e:")
|
|
||||||
with self.indent():
|
|
||||||
if isinstance(
|
|
||||||
ftype, SerializationStrategy
|
|
||||||
):
|
|
||||||
field_type = type_name(ftype.__class__)
|
|
||||||
else:
|
|
||||||
field_type = type_name(ftype)
|
|
||||||
self.add_line(
|
|
||||||
f"raise InvalidFieldValue('{fname}',"
|
|
||||||
f"{field_type},value,cls)"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.add_line("if value is not MISSING:")
|
|
||||||
with self.indent():
|
|
||||||
unpacked_value = self._unpack_field_value(
|
|
||||||
fname=fname,
|
|
||||||
ftype=ftype,
|
|
||||||
parent=self.cls,
|
|
||||||
metadata=metadata,
|
|
||||||
)
|
|
||||||
self.add_line("try:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(
|
|
||||||
f"kwargs['{fname}'] = {unpacked_value}"
|
|
||||||
)
|
|
||||||
self.add_line("except Exception as e:")
|
|
||||||
with self.indent():
|
|
||||||
if isinstance(
|
|
||||||
ftype, SerializationStrategy
|
|
||||||
):
|
|
||||||
field_type = type_name(ftype.__class__)
|
|
||||||
else:
|
|
||||||
field_type = type_name(ftype)
|
|
||||||
self.add_line(
|
|
||||||
f"raise InvalidFieldValue('{fname}',"
|
|
||||||
f"{field_type},value,cls)"
|
|
||||||
)
|
|
||||||
self.add_line("except AttributeError:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line("if not isinstance(d, dict):")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(
|
|
||||||
f"raise ValueError('Argument for "
|
|
||||||
f"{type_name(self.cls)}.from_dict method "
|
|
||||||
f"should be a dict instance') from None"
|
|
||||||
)
|
|
||||||
self.add_line("else:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line("raise")
|
|
||||||
post_deserialize = self.get_declared_hook(__POST_DESERIALIZE__)
|
|
||||||
if post_deserialize:
|
|
||||||
if not isinstance(post_deserialize, classmethod):
|
|
||||||
raise BadHookSignature(
|
|
||||||
f"`{__POST_DESERIALIZE__}` must be a class method "
|
|
||||||
f"with Callable[[{type_name(self.cls)}], "
|
|
||||||
f"{type_name(self.cls)}] signature"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.add_line(
|
|
||||||
f"return cls.{__POST_DESERIALIZE__}(cls(**kwargs), options=options)"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.add_line("return cls(**kwargs)")
|
|
||||||
self.add_line("setattr(cls, 'from_dict', from_dict)")
|
|
||||||
self.compile()
|
|
||||||
|
|
||||||
def add_to_dict(self) -> None:
|
|
||||||
|
|
||||||
self.reset()
|
|
||||||
self.add_line(
|
|
||||||
"def to_dict(self, use_bytes=False, use_enum=False, "
|
|
||||||
"use_datetime=False, options=None):"
|
|
||||||
)
|
|
||||||
with self.indent():
|
|
||||||
pre_serialize = self.get_declared_hook(__PRE_SERIALIZE__)
|
|
||||||
if pre_serialize:
|
|
||||||
self.add_line(f"self = self.{__PRE_SERIALIZE__}(options=options)")
|
|
||||||
self.add_line("kwargs = {}")
|
|
||||||
for fname, ftype in self.field_types.items():
|
|
||||||
metadata = self.metadatas.get(fname)
|
|
||||||
self.add_line(f"value = getattr(self, '{fname}')")
|
|
||||||
self.add_line("if value is None:")
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(f"kwargs['{fname}'] = None")
|
|
||||||
self.add_line("else:")
|
|
||||||
with self.indent():
|
|
||||||
packed_value = self._pack_value(
|
|
||||||
fname=fname,
|
|
||||||
ftype=ftype,
|
|
||||||
parent=self.cls,
|
|
||||||
metadata=metadata,
|
|
||||||
)
|
|
||||||
self.add_line(f"kwargs['{fname}'] = {packed_value}")
|
|
||||||
post_serialize = self.get_declared_hook(__POST_SERIALIZE__)
|
|
||||||
if post_serialize:
|
|
||||||
self.add_line(f"return self.{__POST_SERIALIZE__}(kwargs, options=options)")
|
|
||||||
else:
|
|
||||||
self.add_line("return kwargs")
|
|
||||||
self.add_line("setattr(cls, 'to_dict', to_dict)")
|
|
||||||
self.compile()
|
|
||||||
|
|
||||||
def add_pack_union(self, fname, ftype, parent, variant_types, value_name, metadata):
|
|
||||||
self._add_type_modules(*variant_types)
|
|
||||||
self.add_line(f'def resolve_union(value):')
|
|
||||||
with self.indent():
|
|
||||||
for variant in variant_types:
|
|
||||||
if is_generic(variant):
|
|
||||||
variant_name = get_type_origin(variant).__name__
|
|
||||||
else:
|
|
||||||
variant_name = type_name(variant)
|
|
||||||
self.add_line(f'if isinstance(value, {variant_name}):')
|
|
||||||
with self.indent():
|
|
||||||
self.add_line('try:')
|
|
||||||
with self.indent():
|
|
||||||
packed = self._pack_value(fname, variant, parent, value_name, metadata=metadata)
|
|
||||||
self.add_line(f'return {packed}')
|
|
||||||
self.add_line('except (TypeError, AttributeError, ValueError, LookupError) as e:')
|
|
||||||
with self.indent():
|
|
||||||
self.add_line('pass')
|
|
||||||
else:
|
|
||||||
variant_type_names = ", ".join([type_name(v) for v in variant_types])
|
|
||||||
self.add_line(f"raise ValueError('Union value could not be "
|
|
||||||
f"encoded using types ({variant_type_names})')")
|
|
||||||
return 'resolve_union(value)'
|
|
||||||
|
|
||||||
def _pack_value(
|
|
||||||
self, fname, ftype, parent, value_name="value", metadata=None
|
|
||||||
):
|
|
||||||
|
|
||||||
overridden: typing.Optional[str] = None
|
|
||||||
if metadata is not None:
|
|
||||||
serialize_option = metadata.get("serialize")
|
|
||||||
if callable(serialize_option):
|
|
||||||
setattr(
|
|
||||||
self.cls,
|
|
||||||
f"__{fname}_serialize",
|
|
||||||
staticmethod(serialize_option),
|
|
||||||
)
|
|
||||||
return f"self.__{fname}_serialize(self.{fname})"
|
|
||||||
|
|
||||||
if is_dataclass(ftype):
|
|
||||||
return f"{value_name}.to_dict(use_bytes, use_enum, use_datetime, options)"
|
|
||||||
|
|
||||||
with suppress(TypeError):
|
|
||||||
if issubclass(ftype, SerializableType):
|
|
||||||
return f"{value_name}._serialize()"
|
|
||||||
if isinstance(ftype, SerializationStrategy):
|
|
||||||
return (
|
|
||||||
f"self.__dataclass_fields__['{fname}'].type"
|
|
||||||
f"._serialize({value_name})"
|
|
||||||
)
|
|
||||||
if type_name(ftype) in self.cls._serializable_encoders:
|
|
||||||
return f"self._serializable_encoders['{type_name(ftype)}']._serialize({value_name})"
|
|
||||||
|
|
||||||
origin_type = get_type_origin(ftype)
|
|
||||||
if is_special_typing_primitive(origin_type):
|
|
||||||
if origin_type is typing.Any:
|
|
||||||
return overridden or value_name
|
|
||||||
elif is_union(ftype):
|
|
||||||
args = getattr(ftype, "__args__", ())
|
|
||||||
if len(args) == 2 and args[1] == NoneType: # it is Optional
|
|
||||||
return self._pack_value(fname, args[0], parent, metadata=metadata)
|
|
||||||
else:
|
|
||||||
return self.add_pack_union(fname, ftype, parent, args, value_name, metadata)
|
|
||||||
elif origin_type is typing.AnyStr:
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"AnyStr is not supported by mashumaro"
|
|
||||||
)
|
|
||||||
elif is_type_var(ftype):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"TypeVars are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise UnserializableDataError(
|
|
||||||
f"{ftype} as a field type is not supported by mashumaro"
|
|
||||||
)
|
|
||||||
elif origin_type in (bool, NoneType):
|
|
||||||
return overridden or value_name
|
|
||||||
elif issubclass(origin_type, typing.Collection):
|
|
||||||
args = getattr(ftype, "__args__", ())
|
|
||||||
|
|
||||||
def inner_expr(arg_num=0, v_name="value"):
|
|
||||||
return self._pack_value(fname, args[arg_num], parent, v_name, metadata=metadata)
|
|
||||||
|
|
||||||
if issubclass(
|
|
||||||
origin_type,
|
|
||||||
(typing.List, typing.Deque, typing.Tuple, typing.AbstractSet),
|
|
||||||
):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"[{inner_expr()} for value in {value_name}]"
|
|
||||||
)
|
|
||||||
elif ftype is list:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.List[T] instead"
|
|
||||||
)
|
|
||||||
elif ftype is collections.deque:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Deque[T] instead"
|
|
||||||
)
|
|
||||||
elif ftype is tuple:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Tuple[T] instead"
|
|
||||||
)
|
|
||||||
elif ftype is set:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Set[T] instead"
|
|
||||||
)
|
|
||||||
elif ftype is frozenset:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.FrozenSet[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.ChainMap):
|
|
||||||
if ftype is collections.ChainMap:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname,
|
|
||||||
ftype,
|
|
||||||
parent,
|
|
||||||
"Use typing.ChainMap[KT,VT] instead",
|
|
||||||
)
|
|
||||||
elif is_generic(ftype):
|
|
||||||
if is_dataclass(args[0]):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"ChainMaps with dataclasses as keys "
|
|
||||||
"are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f'[{{{inner_expr(0,"key")}:{inner_expr(1)} '
|
|
||||||
f"for key,value in m.items()}} "
|
|
||||||
f"for m in value.maps]"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, str):
|
|
||||||
return overridden or value_name
|
|
||||||
elif issubclass(origin_type, typing.Mapping):
|
|
||||||
if ftype is dict:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname,
|
|
||||||
ftype,
|
|
||||||
parent,
|
|
||||||
"Use typing.Dict[KT,VT] or Mapping[KT,VT] instead",
|
|
||||||
)
|
|
||||||
elif is_generic(ftype):
|
|
||||||
if is_dataclass(args[0]):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"Mappings with dataclasses as keys "
|
|
||||||
"are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f'{{{inner_expr(0,"key")}: {inner_expr(1)} '
|
|
||||||
f"for key, value in {value_name}.items()}}"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.ByteString):
|
|
||||||
specific = f"encodebytes({value_name}).decode()"
|
|
||||||
return (
|
|
||||||
f"{value_name} if use_bytes else {overridden or specific}"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.Sequence):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"[{inner_expr()} for value in {value_name}]"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, os.PathLike):
|
|
||||||
return overridden or f"{value_name}.__fspath__()"
|
|
||||||
elif issubclass(origin_type, enum.Enum):
|
|
||||||
specific = f"{value_name}.value"
|
|
||||||
return f"{value_name} if use_enum else {overridden or specific}"
|
|
||||||
elif origin_type is int:
|
|
||||||
return overridden or f"int({value_name})"
|
|
||||||
elif origin_type is float:
|
|
||||||
return overridden or f"float({value_name})"
|
|
||||||
elif origin_type in (datetime.datetime, datetime.date, datetime.time):
|
|
||||||
if overridden:
|
|
||||||
return f"{value_name} if use_datetime else {overridden}"
|
|
||||||
return (
|
|
||||||
f"{value_name} if use_datetime else {value_name}.isoformat()"
|
|
||||||
)
|
|
||||||
elif origin_type is datetime.timedelta:
|
|
||||||
return overridden or f"{value_name}.total_seconds()"
|
|
||||||
elif origin_type is datetime.timezone:
|
|
||||||
return overridden or f"{value_name}.tzname(None)"
|
|
||||||
elif origin_type is uuid.UUID:
|
|
||||||
return overridden or f"str({value_name})"
|
|
||||||
elif origin_type in [
|
|
||||||
ipaddress.IPv4Address,
|
|
||||||
ipaddress.IPv6Address,
|
|
||||||
ipaddress.IPv4Network,
|
|
||||||
ipaddress.IPv6Network,
|
|
||||||
ipaddress.IPv4Interface,
|
|
||||||
ipaddress.IPv6Interface,
|
|
||||||
]:
|
|
||||||
return overridden or f"str({value_name})"
|
|
||||||
elif origin_type is Decimal:
|
|
||||||
return overridden or f"str({value_name})"
|
|
||||||
elif origin_type is Fraction:
|
|
||||||
return overridden or f"str({value_name})"
|
|
||||||
|
|
||||||
raise UnserializableDataError(f"_unpack_value: fname: {fname}, ftype: {ftype}, parent: {parent}, origin_type: {origin_type}")
|
|
||||||
|
|
||||||
def add_unpack_union(self, fname, ftype, parent, variant_types, value_name, metadata):
|
|
||||||
self.add_line(f'def resolve_union(value):')
|
|
||||||
with self.indent():
|
|
||||||
for variant in variant_types:
|
|
||||||
if is_generic(variant):
|
|
||||||
variant_name = get_type_origin(variant).__name__
|
|
||||||
else:
|
|
||||||
variant_name = type_name(variant)
|
|
||||||
self.add_line('try:')
|
|
||||||
with self.indent():
|
|
||||||
packed = self._unpack_field_value(fname, variant, parent, value_name)
|
|
||||||
self.add_line(f'packed = {packed}')
|
|
||||||
self.add_line(f'if isinstance(packed, {variant_name}):')
|
|
||||||
with self.indent():
|
|
||||||
self.add_line(f'return packed')
|
|
||||||
self.add_line('except (TypeError, AttributeError, ValueError, LookupError) as e:')
|
|
||||||
with self.indent():
|
|
||||||
self.add_line('pass')
|
|
||||||
else:
|
|
||||||
variant_type_names = ", ".join([type_name(v) for v in variant_types])
|
|
||||||
self.add_line(f"raise ValueError('Union value could not be "
|
|
||||||
f"decoded using types ({variant_type_names})')")
|
|
||||||
return 'resolve_union(value)'
|
|
||||||
|
|
||||||
def _unpack_field_value(
|
|
||||||
self, fname, ftype, parent, value_name="value", metadata=None
|
|
||||||
):
|
|
||||||
|
|
||||||
deserialize_option: typing.Optional[typing.Any] = None
|
|
||||||
overridden: typing.Optional[str] = None
|
|
||||||
if metadata is not None:
|
|
||||||
deserialize_option = metadata.get("deserialize")
|
|
||||||
if callable(deserialize_option):
|
|
||||||
setattr(self.cls, f"__{fname}_deserialize", deserialize_option)
|
|
||||||
return f"cls.__{fname}_deserialize({value_name})"
|
|
||||||
|
|
||||||
if is_dataclass(ftype):
|
|
||||||
return (
|
|
||||||
f"{type_name(ftype)}.from_dict({value_name}, "
|
|
||||||
f"use_bytes, use_enum, use_datetime, options)"
|
|
||||||
)
|
|
||||||
|
|
||||||
with suppress(TypeError):
|
|
||||||
if issubclass(ftype, SerializableType):
|
|
||||||
return f"{type_name(ftype)}._deserialize({value_name})"
|
|
||||||
if isinstance(ftype, SerializationStrategy):
|
|
||||||
return (
|
|
||||||
f"cls.__dataclass_fields__['{fname}'].type"
|
|
||||||
f"._deserialize({value_name})"
|
|
||||||
)
|
|
||||||
if type_name(ftype) in self.cls._serializable_encoders:
|
|
||||||
return f"cls._serializable_encoders['{type_name(ftype)}']._deserialize({value_name})"
|
|
||||||
|
|
||||||
origin_type = get_type_origin(ftype)
|
|
||||||
if is_special_typing_primitive(origin_type):
|
|
||||||
if origin_type is typing.Any:
|
|
||||||
return overridden or value_name
|
|
||||||
elif is_union(ftype):
|
|
||||||
args = getattr(ftype, "__args__", ())
|
|
||||||
if len(args) == 2 and args[1] == NoneType: # it is Optional
|
|
||||||
return self._unpack_field_value(
|
|
||||||
fname, args[0], parent, metadata=metadata
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return self.add_unpack_union(fname, ftype, parent, args, value_name, metadata)
|
|
||||||
elif origin_type is typing.AnyStr:
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"AnyStr is not supported by mashumaro"
|
|
||||||
)
|
|
||||||
elif is_type_var(ftype):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"TypeVars are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise UnserializableDataError(
|
|
||||||
f"{ftype} as a field type is not supported by mashumaro"
|
|
||||||
)
|
|
||||||
elif origin_type in (bool, NoneType):
|
|
||||||
return overridden or value_name
|
|
||||||
elif issubclass(origin_type, typing.Collection):
|
|
||||||
args = getattr(ftype, "__args__", ())
|
|
||||||
|
|
||||||
def inner_expr(arg_num=0, v_name="value"):
|
|
||||||
return self._unpack_field_value(
|
|
||||||
fname, args[arg_num], parent, v_name
|
|
||||||
)
|
|
||||||
|
|
||||||
if issubclass(origin_type, typing.List):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"[{inner_expr()} for value in {value_name}]"
|
|
||||||
)
|
|
||||||
elif ftype is list:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.List[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.Deque):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"collections.deque([{inner_expr()} "
|
|
||||||
f"for value in {value_name}])"
|
|
||||||
)
|
|
||||||
elif ftype is collections.deque:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Deque[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.Tuple):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"tuple([{inner_expr()} for value in {value_name}])"
|
|
||||||
)
|
|
||||||
elif ftype is tuple:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Tuple[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.FrozenSet):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"frozenset([{inner_expr()} "
|
|
||||||
f"for value in {value_name}])"
|
|
||||||
)
|
|
||||||
elif ftype is frozenset:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.FrozenSet[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.AbstractSet):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"set([{inner_expr()} for value in {value_name}])"
|
|
||||||
)
|
|
||||||
elif ftype is set:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname, ftype, parent, "Use typing.Set[T] instead"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.ChainMap):
|
|
||||||
if ftype is collections.ChainMap:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname,
|
|
||||||
ftype,
|
|
||||||
parent,
|
|
||||||
"Use typing.ChainMap[KT,VT] instead",
|
|
||||||
)
|
|
||||||
elif is_generic(ftype):
|
|
||||||
if is_dataclass(args[0]):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"ChainMaps with dataclasses as keys "
|
|
||||||
"are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"collections.ChainMap("
|
|
||||||
f'*[{{{inner_expr(0,"key")}:{inner_expr(1)} '
|
|
||||||
f"for key, value in m.items()}} "
|
|
||||||
f"for m in {value_name}])"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, str):
|
|
||||||
return overridden or value_name
|
|
||||||
elif issubclass(origin_type, typing.Mapping):
|
|
||||||
if ftype is dict:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname,
|
|
||||||
ftype,
|
|
||||||
parent,
|
|
||||||
"Use typing.Dict[KT,VT] or Mapping[KT,VT] instead",
|
|
||||||
)
|
|
||||||
elif is_generic(ftype):
|
|
||||||
if is_dataclass(args[0]):
|
|
||||||
raise UnserializableDataError(
|
|
||||||
"Mappings with dataclasses as keys "
|
|
||||||
"are not supported by mashumaro"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f'{{{inner_expr(0,"key")}: {inner_expr(1)} '
|
|
||||||
f"for key, value in {value_name}.items()}}"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, typing.ByteString):
|
|
||||||
if origin_type is bytes:
|
|
||||||
specific = f"decodebytes({value_name}.encode())"
|
|
||||||
return (
|
|
||||||
f"{value_name} if use_bytes else "
|
|
||||||
f"{overridden or specific}"
|
|
||||||
)
|
|
||||||
elif origin_type is bytearray:
|
|
||||||
if overridden:
|
|
||||||
overridden = (
|
|
||||||
f"bytearray({value_name}) if use_bytes else "
|
|
||||||
f"{overridden}"
|
|
||||||
)
|
|
||||||
specific = (
|
|
||||||
f"bytearray({value_name} if use_bytes else "
|
|
||||||
f"decodebytes({value_name}.encode()))"
|
|
||||||
)
|
|
||||||
return overridden or specific
|
|
||||||
elif issubclass(origin_type, typing.Sequence):
|
|
||||||
if is_generic(ftype):
|
|
||||||
return (
|
|
||||||
overridden
|
|
||||||
or f"[{inner_expr()} for value in {value_name}]"
|
|
||||||
)
|
|
||||||
elif issubclass(origin_type, os.PathLike):
|
|
||||||
if overridden:
|
|
||||||
return overridden
|
|
||||||
elif issubclass(origin_type, pathlib.PosixPath):
|
|
||||||
return f"pathlib.PosixPath({value_name})"
|
|
||||||
elif issubclass(origin_type, pathlib.WindowsPath):
|
|
||||||
return f"pathlib.WindowsPath({value_name})"
|
|
||||||
elif issubclass(origin_type, pathlib.Path):
|
|
||||||
return f"pathlib.Path({value_name})"
|
|
||||||
elif issubclass(origin_type, pathlib.PurePosixPath):
|
|
||||||
return f"pathlib.PurePosixPath({value_name})"
|
|
||||||
elif issubclass(origin_type, pathlib.PureWindowsPath):
|
|
||||||
return f"pathlib.PureWindowsPath({value_name})"
|
|
||||||
elif issubclass(origin_type, pathlib.PurePath):
|
|
||||||
return f"pathlib.PurePath({value_name})"
|
|
||||||
elif origin_type is os.PathLike:
|
|
||||||
return f"pathlib.PurePath({value_name})"
|
|
||||||
else:
|
|
||||||
return f"{type_name(origin_type)}({value_name})"
|
|
||||||
elif issubclass(origin_type, enum.Enum):
|
|
||||||
specific = f"{type_name(origin_type)}({value_name})"
|
|
||||||
return f"{value_name} if use_enum else {overridden or specific}"
|
|
||||||
elif origin_type is int:
|
|
||||||
return overridden or f"int({value_name})"
|
|
||||||
elif origin_type is float:
|
|
||||||
return overridden or f"float({value_name})"
|
|
||||||
elif origin_type in (datetime.datetime, datetime.date, datetime.time):
|
|
||||||
if overridden:
|
|
||||||
return f"{value_name} if use_datetime else {overridden}"
|
|
||||||
elif deserialize_option is not None:
|
|
||||||
if deserialize_option == "ciso8601":
|
|
||||||
self.ensure_module_imported("ciso8601")
|
|
||||||
datetime_parser = "ciso8601.parse_datetime"
|
|
||||||
elif deserialize_option == "pendulum":
|
|
||||||
self.ensure_module_imported("pendulum")
|
|
||||||
datetime_parser = "pendulum.parse"
|
|
||||||
else:
|
|
||||||
raise UnserializableField(
|
|
||||||
fname,
|
|
||||||
ftype,
|
|
||||||
parent,
|
|
||||||
f"Unsupported deserialization engine "
|
|
||||||
f'"{deserialize_option}"',
|
|
||||||
)
|
|
||||||
suffix = ""
|
|
||||||
if origin_type is datetime.date:
|
|
||||||
suffix = ".date()"
|
|
||||||
elif origin_type is datetime.time:
|
|
||||||
suffix = ".time()"
|
|
||||||
return (
|
|
||||||
f"{value_name} if use_datetime else "
|
|
||||||
f"{datetime_parser}({value_name}){suffix}"
|
|
||||||
)
|
|
||||||
return (
|
|
||||||
f"{value_name} if use_datetime else "
|
|
||||||
f"datetime.{origin_type.__name__}."
|
|
||||||
f"fromisoformat({value_name})"
|
|
||||||
)
|
|
||||||
elif origin_type is datetime.timedelta:
|
|
||||||
return overridden or f"datetime.timedelta(seconds={value_name})"
|
|
||||||
elif origin_type is datetime.timezone:
|
|
||||||
return overridden or f"parse_timezone({value_name})"
|
|
||||||
elif origin_type is uuid.UUID:
|
|
||||||
return overridden or f"uuid.UUID({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv4Address:
|
|
||||||
return overridden or f"ipaddress.IPv4Address({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv6Address:
|
|
||||||
return overridden or f"ipaddress.IPv6Address({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv4Network:
|
|
||||||
return overridden or f"ipaddress.IPv4Network({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv6Network:
|
|
||||||
return overridden or f"ipaddress.IPv6Network({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv4Interface:
|
|
||||||
return overridden or f"ipaddress.IPv4Interface({value_name})"
|
|
||||||
elif origin_type is ipaddress.IPv6Interface:
|
|
||||||
return overridden or f"ipaddress.IPv6Interface({value_name})"
|
|
||||||
elif origin_type is Decimal:
|
|
||||||
return overridden or f"Decimal({value_name})"
|
|
||||||
elif origin_type is Fraction:
|
|
||||||
return overridden or f"Fraction({value_name})"
|
|
||||||
|
|
||||||
raise UnserializableField(fname, ftype, parent)
|
|
||||||
@@ -1,53 +0,0 @@
import json
from types import MappingProxyType
from typing import Any, Dict, Mapping, Type, TypeVar, Union

from typing_extensions import Protocol

from mashumaro.serializer.base import DataClassDictMixin

DEFAULT_DICT_PARAMS = {
    "use_bytes": False,
    "use_enum": False,
    "use_datetime": False,
}
EncodedData = Union[str, bytes, bytearray]
T = TypeVar("T", bound="DataClassJSONMixin")


class Encoder(Protocol): # pragma no cover
    def __call__(self, obj, **kwargs) -> EncodedData:
        ...


class Decoder(Protocol): # pragma no cover
    def __call__(self, s: EncodedData, **kwargs) -> Dict[Any, Any]:
        ...


class DataClassJSONMixin(DataClassDictMixin):
    def to_json(
        self: T,
        encoder: Encoder = json.dumps,
        dict_params: Mapping = MappingProxyType({}),
        **encoder_kwargs,
    ) -> EncodedData:

        return encoder(
            self.to_dict(**dict(DEFAULT_DICT_PARAMS, **dict_params)),
            **encoder_kwargs,
        )

    @classmethod
    def from_json(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = json.loads,
        dict_params: Mapping = MappingProxyType({}),
        **decoder_kwargs,
    ) -> T:

        return cls.from_dict(
            decoder(data, **decoder_kwargs),
            **dict(DEFAULT_DICT_PARAMS, **dict_params),
        )
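(Aside, not part of the diff: a minimal round-trip sketch with the removed DataClassJSONMixin; the RunResult class is hypothetical and the import path assumes the vendored layout above.)

from dataclasses import dataclass

from mashumaro.serializer.json import DataClassJSONMixin  # vendored path removed by this diff


@dataclass
class RunResult(DataClassJSONMixin):  # hypothetical example class
    status: str
    execution_time: float


raw = RunResult(status="success", execution_time=1.5).to_json()
print(raw)                       # {"status": "success", "execution_time": 1.5}
print(RunResult.from_json(raw))  # RunResult(status='success', execution_time=1.5)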
@@ -1,53 +0,0 @@
from functools import partial
from types import MappingProxyType
from typing import Any, Dict, Mapping, Type, TypeVar, Union

import msgpack
from typing_extensions import Protocol

from mashumaro.serializer.base import DataClassDictMixin

DEFAULT_DICT_PARAMS = {
    "use_bytes": True,
    "use_enum": False,
    "use_datetime": False,
}
EncodedData = Union[str, bytes, bytearray]
T = TypeVar("T", bound="DataClassMessagePackMixin")


class Encoder(Protocol): # pragma no cover
    def __call__(self, o, **kwargs) -> EncodedData:
        ...


class Decoder(Protocol): # pragma no cover
    def __call__(self, packed: EncodedData, **kwargs) -> Dict[Any, Any]:
        ...


class DataClassMessagePackMixin(DataClassDictMixin):
    def to_msgpack(
        self: T,
        encoder: Encoder = partial(msgpack.packb, use_bin_type=True),
        dict_params: Mapping = MappingProxyType({}),
        **encoder_kwargs,
    ) -> EncodedData:

        return encoder(
            self.to_dict(**dict(DEFAULT_DICT_PARAMS, **dict_params)),
            **encoder_kwargs,
        )

    @classmethod
    def from_msgpack(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = partial(msgpack.unpackb, raw=False),
        dict_params: Mapping = MappingProxyType({}),
        **decoder_kwargs,
    ) -> T:
        return cls.from_dict(
            decoder(data, **decoder_kwargs),
            **dict(DEFAULT_DICT_PARAMS, **dict_params),
        )
@@ -1,43 +0,0 @@
from types import MappingProxyType
from typing import Callable, Dict, Mapping, Type, TypeVar, Union

import yaml

from mashumaro.serializer.base import DataClassDictMixin

DEFAULT_DICT_PARAMS = {
    "use_bytes": False,
    "use_enum": False,
    "use_datetime": False,
}
EncodedData = Union[str, bytes]
Encoder = Callable[[Dict], EncodedData]
Decoder = Callable[[EncodedData], Dict]
T = TypeVar("T", bound="DataClassYAMLMixin")


class DataClassYAMLMixin(DataClassDictMixin):
    def to_yaml(
        self: T,
        encoder: Encoder = yaml.dump,
        dict_params: Mapping = MappingProxyType({}),
        **encoder_kwargs,
    ) -> EncodedData:

        return encoder(
            self.to_dict(**dict(DEFAULT_DICT_PARAMS, **dict_params)),
            **encoder_kwargs,
        )

    @classmethod
    def from_yaml(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = yaml.safe_load,
        dict_params: Mapping = MappingProxyType({}),
        **decoder_kwargs,
    ) -> T:
        return cls.from_dict(
            decoder(data, **decoder_kwargs),
            **dict(DEFAULT_DICT_PARAMS, **dict_params),
        )
@@ -1,58 +0,0 @@
# this file has been modified for use with dbt
import decimal
from typing import TypeVar, Generic

TV = TypeVar("TV")

class SerializableEncoder(Generic[TV]):
    @classmethod
    def _serialize(cls, value):
        raise NotImplementedError

    @classmethod
    def _deserialize(cls, value):
        raise NotImplementedError

class SerializableType:
    def _serialize(self):
        raise NotImplementedError

    @classmethod
    def _deserialize(cls, value):
        raise NotImplementedError


class SerializationStrategy:
    def _serialize(self, value):
        raise NotImplementedError

    def _deserialize(self, value):
        raise NotImplementedError


class RoundedDecimal(SerializationStrategy):
    def __init__(self, places=None, rounding=None):
        if places is not None:
            self.exp = decimal.Decimal((0, (1,), -places))
        else:
            self.exp = None
        self.rounding = rounding

    def _serialize(self, value) -> str:
        if self.exp:
            if self.rounding:
                return str(value.quantize(self.exp, rounding=self.rounding))
            else:
                return str(value.quantize(self.exp))
        else:
            return str(value)

    def _deserialize(self, value: str) -> decimal.Decimal:
        return decimal.Decimal(str(value))


__all__ = [
    "SerializableType",
    "SerializationStrategy",
    "RoundedDecimal",
]
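(Aside, not part of the diff: how the removed RoundedDecimal strategy behaves, assuming the class above is in scope.)

import decimal

strategy = RoundedDecimal(places=2, rounding=decimal.ROUND_HALF_UP)
print(strategy._serialize(decimal.Decimal("1.005")))  # '1.01' -- quantized to two places
print(strategy._deserialize("1.01"))                  # Decimal('1.01')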
@@ -24,7 +24,7 @@ def read(fname):


 package_name = "dbt-core"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """dbt (data build tool) is a command line tool that helps \
 analysts and engineers transform data in their warehouse more effectively"""

@@ -37,12 +37,7 @@ setup(
    author="Fishtown Analytics",
    author_email="info@fishtownanalytics.com",
    url="https://github.com/fishtown-analytics/dbt",
-    packages=find_namespace_packages(include=[
-        'dbt',
-        'dbt.*',
-        'mashumaro',
-        'mashumaro.*'
-    ]),
+    packages=find_namespace_packages(include=['dbt', 'dbt.*']),
    package_data={
        'dbt': [
            'include/index.html',
@@ -70,21 +65,19 @@ setup(
        'networkx>=2.3,<3',
        'minimal-snowplow-tracker==0.0.2',
        'colorama>=0.3.9,<0.4.4',
-        'agate>=1.6,<2',
+        'agate>=1.6,<1.6.2',
        'isodate>=0.6,<0.7',
        'json-rpc>=1.12,<2',
        'werkzeug>=0.15,<2.0',
        'dataclasses==0.6;python_version<"3.7"',
        'hologram==0.0.13',
        'logbook>=1.5,<1.6',
+        'mashumaro==2.0',
        'typing-extensions>=3.7.4,<3.8',
        # the following are all to match snowflake-connector-python
        'requests>=2.18.0,<2.24.0',
        'idna<2.10',
        'cffi>=1.9,<1.15',
-        # the following are pulled in from mashumaro
-        "backports-datetime-fromisoformat;python_version=='3.6'",
-        "msgpack>=0.5.6",
    ],
    zip_safe=False,
    classifiers=[
@@ -13,4 +13,3 @@ mypy==0.782
wheel
twine
pytest-logbook>=1.2.0,<1.3
-jsonschema
docker/requirements/requirements.0.19.1.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==20.3.0
azure-common==1.1.27
azure-core==1.12.0
azure-storage-blob==12.8.0
Babel==2.9.0
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.1
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.7
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.28.0
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.36.1
hologram==0.0.13
idna==2.9
importlib-metadata==3.10.0
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==1.1.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5
oauthlib==3.1.0
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.18.1
protobuf==3.15.6
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==4.0.1
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.6
SecretStorage==3.3.1
six==1.15.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
docker/requirements/requirements.0.19.1b1.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==20.3.0
azure-common==1.1.26
azure-core==1.11.0
azure-storage-blob==12.7.1
Babel==2.9.0
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.1
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.4
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.26.1
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.35.0
hologram==0.0.13
idna==2.9
importlib-metadata==3.4.0
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==1.1.1
mashumaro @ https://github.com/fishtown-analytics/dbt-mashumaro/archive/c25ed077e7982e2bfe8aee7df80bfeac63d3927f.tar.gz
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5
oauthlib==3.1.0
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.13.0
protobuf==3.14.0
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==4.0.1
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7
s3transfer==0.3.4
SecretStorage==3.3.1
six==1.15.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.0
docker/requirements/requirements.0.19.1rc1.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==20.3.0
azure-common==1.1.26
azure-core==1.12.0
azure-storage-blob==12.8.0
Babel==2.9.0
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.1
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.6
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.27.1
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.36.1
hologram==0.0.13
idna==2.9
importlib-metadata==3.7.3
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==1.1.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5
oauthlib==3.1.0
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.17.0
protobuf==3.15.6
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==4.0.1
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.4
SecretStorage==3.3.1
six==1.15.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
docker/requirements/requirements.0.19.1rc2.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==20.3.0
azure-common==1.1.26
azure-core==1.12.0
azure-storage-blob==12.8.0
Babel==2.9.0
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.1
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.6
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.28.0
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.36.1
hologram==0.0.13
idna==2.9
importlib-metadata==3.7.3
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==1.1.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5
oauthlib==3.1.0
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.18.1
protobuf==3.15.6
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==4.0.1
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.6
SecretStorage==3.3.1
six==1.15.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
docker/requirements/requirements.0.19.2.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==21.2.0
azure-common==1.1.27
azure-core==1.15.0
azure-storage-blob==12.8.1
Babel==2.9.1
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.2
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.7
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.32.0
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.3.1
googleapis-common-protos==1.52.0
grpcio==1.38.1
hologram==0.0.13
idna==2.9
importlib-metadata==4.6.0
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==2.0.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5.1
oauthlib==3.1.1
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.18.1
protobuf==3.17.3
psycopg2-binary==2.9.1
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.18.0
python-dateutil==2.8.1
python-slugify==5.0.2
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.7
SecretStorage==3.3.1
six==1.16.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
docker/requirements/requirements.0.19.2rc1.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==20.3.0
azure-common==1.1.27
azure-core==1.13.0
azure-storage-blob==12.8.1
Babel==2.9.1
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.2
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.7
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.30.0
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.37.0
hologram==0.0.13
idna==2.9
importlib-metadata==4.0.1
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==1.1.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5.1
oauthlib==3.1.0
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.18.1
protobuf==3.15.8
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==4.0.1
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.7
SecretStorage==3.3.1
six==1.15.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
docker/requirements/requirements.0.19.2rc2.txt
@@ -0,0 +1,73 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==21.2.0
azure-common==1.1.27
azure-core==1.14.0
azure-storage-blob==12.8.1
Babel==2.9.1
boto3==1.15.18
botocore==1.18.18
cachetools==4.2.2
certifi==2020.12.5
cffi==1.14.5
chardet==3.0.4
colorama==0.4.3
cryptography==3.4.7
decorator==4.4.2
google-api-core==1.23.0
google-auth==1.30.1
google-cloud-bigquery==2.3.1
google-cloud-core==1.4.4
google-crc32c==1.1.2
google-resumable-media==1.3.0
googleapis-common-protos==1.52.0
grpcio==1.38.0
hologram==0.0.13
idna==2.9
importlib-metadata==4.4.0
isodate==0.6.0
jeepney==0.6.0
Jinja2==2.11.2
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==2.0.1
mashumaro==2.0
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.5.1
oauthlib==3.1.1
oscrypto==1.2.1
parsedatetime==2.6
proto-plus==1.18.1
protobuf==3.17.2
psycopg2-binary==2.8.6
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==1.7.1
pyOpenSSL==20.0.1
pyrsistent==0.17.3
python-dateutil==2.8.1
python-slugify==5.0.2
pytimeparse==1.1.8
pytz==2020.5
PyYAML==5.4.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.3.7
SecretStorage==3.3.1
six==1.16.0
snowflake-connector-python==2.3.6
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.7.4.3
urllib3==1.25.11
Werkzeug==1.0.1
zipp==3.4.1
@@ -1 +1 @@
-version = '0.19.1b2'
+version = '0.19.2'
@@ -48,14 +48,14 @@ RETRYABLE_ERRORS = (

 @lru_cache()
-def get_bigquery_defaults() -> Tuple[Any, Optional[str]]:
+def get_bigquery_defaults(scopes=None) -> Tuple[Any, Optional[str]]:
     """
     Returns (credentials, project_id)

     project_id is returned available from the environment; otherwise None
     """
     # Cached, because the underlying implementation shells out, taking ~1s
-    return google.auth.default()
+    return google.auth.default(scopes=scopes)


 class Priority(StrEnum):
@@ -112,15 +112,17 @@ class BigQueryCredentials(Credentials):
         return ('method', 'database', 'schema', 'location', 'priority',
                 'timeout_seconds', 'maximum_bytes_billed')

-    def __post_init__(self):
+    @classmethod
+    def __pre_deserialize__(cls, d: Dict[Any, Any]) -> Dict[Any, Any]:
         # We need to inject the correct value of the database (aka project) at
         # this stage, ref
         # https://github.com/fishtown-analytics/dbt/pull/2908#discussion_r532927436.

         # `database` is an alias of `project` in BigQuery
-        if self.database is None:
+        if 'database' not in d:
             _, database = get_bigquery_defaults()
-            self.database = database
+            d['database'] = database
+        return d


 class BigQueryConnectionManager(BaseConnectionManager):
@@ -201,7 +203,7 @@ class BigQueryConnectionManager(BaseConnectionManager):
         creds = GoogleServiceAccountCredentials.Credentials

         if method == BigQueryConnectionMethod.OAUTH:
-            credentials, _ = get_bigquery_defaults()
+            credentials, _ = get_bigquery_defaults(scopes=cls.SCOPE)
             return credentials

         elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT:
@@ -20,7 +20,7 @@ except ImportError:


 package_name = "dbt-bigquery"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """The bigquery adapter plugin for dbt (data build tool)"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -1 +1 @@
-version = '0.19.1b2'
+version = '0.19.2'
@@ -41,7 +41,7 @@ def _dbt_psycopg2_name():


 package_name = "dbt-postgres"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """The postgres adapter plugin for dbt (data build tool)"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -1 +1 @@
-version = '0.19.1b2'
+version = '0.19.2'
@@ -20,7 +20,7 @@ except ImportError:


 package_name = "dbt-redshift"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """The redshift adapter plugin for dbt (data build tool)"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -1 +1 @@
-version = '0.19.1b2'
+version = '0.19.2'
@@ -20,7 +20,7 @@ except ImportError:


 package_name = "dbt-snowflake"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """The snowflake adapter plugin for dbt (data build tool)"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -318,7 +318,7 @@ class UnitBuilder(PytestBuilder):

 class Flake8Builder(ArgBuilder):
     def add_test_environment_args(self):
-        self.args.extend(['flake8', '--select', 'E,W,F', '--ignore', 'W504'])
+        self.args.extend(['flake8', '--select', 'E,W,F', '--ignore', 'W504', '--max-line-length', '99'])
         start = len(self.args)
         self.args.extend(self.parsed.flake8_args)
         if len(self.args) == start:
setup.py (2 lines changed)
@@ -24,7 +24,7 @@ with open(os.path.join(this_directory, 'README.md')) as f:


 package_name = "dbt"
-package_version = "0.19.1b2"
+package_version = "0.19.2"
 description = """With dbt, data analysts and engineers can build analytics \
 the way engineers build applications."""

@@ -0,0 +1,8 @@
name: 'local_utils'
version: '1.0'
config-version: 2

profile: 'default'

macro-paths: ["macros"]
@@ -0,0 +1,7 @@
{% macro current_timestamp() -%}
{{ return(adapter.dispatch('current_timestamp', packages = local_utils._get_utils_namespaces())()) }}
{%- endmacro %}

{% macro default__current_timestamp() -%}
now()
{%- endmacro %}
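For orientation: this fixture deliberately calls `adapter.dispatch` with the older `packages =` keyword, resolving the candidate namespaces through a macro call, while the `datediff` fixture added further below passes the namespace directly. A minimal sketch of the two call styles, assuming the same `local_utils` package; neither line is part of the diff itself:

{# older style, as used in this fixture: namespaces resolved via a macro call #}
{{ adapter.dispatch('current_timestamp', packages = local_utils._get_utils_namespaces())() }}

{# newer style, as used by the datediff fixture below: name the namespace directly #}
{{ adapter.dispatch('current_timestamp', 'local_utils')() }}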
@@ -0,0 +1,7 @@
{% macro test_pkg_and_dispatch(model) -%}
{{ return(adapter.dispatch('test_pkg_and_dispatch', packages = local_utils._get_utils_namespaces())()) }}
{%- endmacro %}

{% macro default__test_pkg_and_dispatch(model) %}
select count(*) from {{ local_utils.current_timestamp() }}
{% endmacro %}
@@ -0,0 +1,28 @@
{% macro datediff(first_date, second_date, datepart) %}
{{ return(adapter.dispatch('datediff', 'local_utils')(first_date, second_date, datepart)) }}
{% endmacro %}


{% macro default__datediff(first_date, second_date, datepart) %}

    datediff(
        {{ datepart }},
        {{ first_date }},
        {{ second_date }}
        )

{% endmacro %}


{% macro postgres__datediff(first_date, second_date, datepart) %}

    {% if datepart == 'year' %}
        (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))
    {% elif datepart == 'quarter' %}
        ({{ local_utils.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))
    {% else %}
        ( 1000 )
    {% endif %}

{% endmacro %}
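For context, a model or test could call this dispatched macro directly. A minimal usage sketch, assuming made-up names (`orders`, `order_date`, `shipped_date` are placeholders, not part of the diff):

-- hypothetical model: table and column names are illustrative only
select
  {{ local_utils.datediff('order_date', 'shipped_date', 'year') }} as years_elapsed
from orders

On Postgres, `adapter.dispatch('datediff', 'local_utils')` resolves to `postgres__datediff`, so the `year` branch compiles to a `date_part('year', ...)` difference; on other adapters the `default__datediff` body is rendered instead.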
@@ -0,0 +1,4 @@
{% macro _get_utils_namespaces() %}
{% set override_namespaces = var('local_utils_dispatch_list', []) %}
{% do return(override_namespaces + ['local_utils']) %}
{% endmacro %}
@@ -0,0 +1,19 @@
{% macro test_type_one(model) %}

select count(*) from (

    select * from {{ model }}
    union all
    select * from {{ ref('model_b') }}

) as Foo

{% endmacro %}

{% macro test_type_two(model) %}

    {{ config(severity = "WARN") }}

    select count(*) from {{ model }}

{% endmacro %}
@@ -0,0 +1,3 @@
{% macro test_call_pkg_macro(model) %}
select count(*) from {{ local_utils.current_timestamp() }}
{% endmacro %}
@@ -0,0 +1,3 @@
{% macro test_my_datediff(model) %}
select {{ local_utils.datediff() }}
{% endmacro %}
@@ -0,0 +1,19 @@
{% macro test_type_one(model) %}

select count(*) from (

    select * from {{ model }}
    union all
    select * from {{ ref('model_b') }}

) as Foo

{% endmacro %}

{% macro test_type_two(model) %}

    {{ config(severity = "WARN") }}

    select count(*) from {{ model }}

{% endmacro %}
@@ -0,0 +1,3 @@
{% macro test_call_pkg_macro(model) %}
select count(*) from {{ test_utils.current_timestamp() }}
{% endmacro %}
@@ -0,0 +1 @@
select 1 as fun
@@ -0,0 +1 @@
select 1 as notfun
@@ -0,0 +1 @@
select 1 as fun
@@ -0,0 +1,13 @@

version: 2

models:
  - name: model_a
    tests:
      - type_one
      - type_two
  - name: model_c
    tests:
      - call_pkg_macro
      - local_utils.pkg_and_dispatch
      - my_datediff
@@ -0,0 +1 @@
select 1 as fun
@@ -0,0 +1 @@
select 1 as notfun
@@ -0,0 +1 @@
select 1 as fun
@@ -0,0 +1,12 @@

version: 2

models:
  - name: model_a
    tests:
      - type_one
      - type_two
  - name: model_c
    tests:
      - call_pkg_macro
      - test_utils.pkg_and_dispatch
@@ -3,6 +3,7 @@ import os

 from dbt.task.test import TestTask
 from dbt.exceptions import CompilationException
+from dbt.contracts.results import TestStatus


 class TestSchemaTests(DBTIntegrationTest):
@@ -390,6 +391,120 @@ class TestSchemaCaseInsensitive(DBTIntegrationTest):
         self.assertEqual(len(results), 1)


+class TestSchemaTestContext(DBTIntegrationTest):
+    @property
+    def schema(self):
+        return "schema_tests_008"
+
+    @property
+    def models(self):
+        return "test-context-models"
+
+    @property
+    def project_config(self):
+        return {
+            'config-version': 2,
+            "macro-paths": ["test-context-macros"],
+            "vars": {
+                'local_utils_dispatch_list': ['local_utils']
+            }
+        }
+
+    @property
+    def packages_config(self):
+        return {
+            "packages": [
+                {
+                    'local': 'local_utils'
+                }
+            ]
+        }
+
+    @use_profile('postgres')
+    def test_postgres_test_context_tests(self):
+        # This test checks that the TestContext and TestMacroNamespace
+        # are working correctly
+        self.run_dbt(['deps'])
+        results = self.run_dbt(strict=False)
+        self.assertEqual(len(results), 3)
+
+        run_result = self.run_dbt(['test'], expect_pass=False)
+        results = run_result.results
+        results = sorted(results, key=lambda r: r.node.name)
+        self.assertEqual(len(results), 5)
+        # call_pkg_macro_model_c_
+        self.assertEqual(results[0].status, TestStatus.Fail)
+        # pkg_and_dispatch_model_c_
+        self.assertEqual(results[1].status, TestStatus.Fail)
+        # my_datediff
+        self.assertRegex(results[2].node.compiled_sql, r'1000')
+        # type_one_model_a_
+        self.assertEqual(results[3].status, TestStatus.Fail)
+        self.assertRegex(results[3].node.compiled_sql, r'union all')
+        # type_two_model_a_
+        self.assertEqual(results[4].status, TestStatus.Fail)
+        self.assertEqual(results[4].node.config.severity, 'WARN')
+
+
+class TestSchemaTestContextWithMacroNamespace(DBTIntegrationTest):
+    @property
+    def schema(self):
+        return "schema_tests_008"
+
+    @property
+    def models(self):
+        return "test-context-models2"
+
+    @property
+    def project_config(self):
+        return {
+            'config-version': 2,
+            "macro-paths": ["test-context-macros2"],
+            "dispatch": [
+                {
+                    "macro_namespace": "test_utils",
+                    "search_order": ['local_utils', 'test_utils'],
+                }
+            ],
+        }
+
+    @property
+    def packages_config(self):
+        return {
+            "packages": [
+                {
+                    'local': 'test_utils'
+                },
+                {
+                    'local': 'local_utils'
+                },
+            ]
+        }
+
+    @use_profile('postgres')
+    def test_postgres_test_context_with_macro_namespace(self):
+        # This test checks that the TestContext and TestMacroNamespace
+        # are working correctly
+        self.run_dbt(['deps'])
+        results = self.run_dbt(strict=False)
+        self.assertEqual(len(results), 3)
+
+        run_result = self.run_dbt(['test'], expect_pass=False)
+        results = run_result.results
+        results = sorted(results, key=lambda r: r.node.name)
+        self.assertEqual(len(results), 4)
+        # call_pkg_macro_model_c_
+        self.assertEqual(results[0].status, TestStatus.Fail)
+        # pkg_and_dispatch_model_c_
+        self.assertEqual(results[1].status, TestStatus.Fail)
+        # type_one_model_a_
+        self.assertEqual(results[2].status, TestStatus.Fail)
+        self.assertRegex(results[2].node.compiled_sql, r'union all')
+        # type_two_model_a_
+        self.assertEqual(results[3].status, TestStatus.Fail)
+        self.assertEqual(results[3].node.config.severity, 'WARN')
+
+
 class TestSchemaYAMLExtension(DBTIntegrationTest):
     @property
     def schema(self):
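For orientation: the second test class above configures project-level `dispatch` with `macro_namespace: test_utils` and `search_order: ['local_utils', 'test_utils']`, so dispatched macros in the `test_utils` test package (its files are among those not shown in this diff) are resolved by searching `local_utils` first. A hedged sketch of what such a package macro could look like; the body is purely illustrative, not taken from the diff:

{% macro test_pkg_and_dispatch(model) -%}
{{ return(adapter.dispatch('test_pkg_and_dispatch', 'test_utils')()) }}
{%- endmacro %}

With that search order, an implementation such as `default__test_pkg_and_dispatch` defined in `local_utils` would be picked up before one defined in `test_utils` itself.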
Some files were not shown because too many files have changed in this diff.