Compare commits

...

2 Commits

Author SHA1 Message Date
MichelleArk
1fec913cd1 update adapter requirements 2025-11-26 16:26:45 -05:00
MichelleArk
e4b83a67b3 consider cost prior to retrieving batch metadata on a per-adapter basis 2025-11-26 16:24:56 -05:00
5 changed files with 19 additions and 3 deletions

View File

@@ -778,6 +778,7 @@ def source(ctx, **kwargs):
@p.target_path
@p.threads
@p.vars
@p.force_batched
@requires.postflight
@requires.preflight
@requires.profile

View File

@@ -266,6 +266,13 @@ favor_state = _create_option_and_track_env_var(
help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)
force_batched = _create_option_and_track_env_var(
"--force-batched/--no-force-batched",
envvar="DBT_ENGINE_FORCE_BATCHED",
help="If set, dbt will force the use of batched operations for source freshness checks.",
is_flag=True,
)
full_refresh = _create_option_and_track_env_var(
"--full-refresh",
"-f",

View File

@@ -139,7 +139,7 @@ class FreshnessRunner(BaseRunner):
)
status = compiled_node.freshness.status(freshness["age"])
elif self.adapter.supports(Capability.TableLastModifiedMetadata):
elif self.adapter.supports(Capability.TableLastModifiedMetadata, accept_cost=self.config.args.force_batched):
if compiled_node.freshness.filter is not None:
fire_event(
Note(
@@ -232,11 +232,18 @@ class FreshnessTask(RunTask):
before_run_status = super().before_run(adapter, selected_uids)
if before_run_status == RunStatus.Success and adapter.supports(
Capability.TableLastModifiedMetadataBatch
Capability.TableLastModifiedMetadataBatch, accept_cost=self.args.force_batched
):
populate_metadata_freshness_cache_status = self.populate_metadata_freshness_cache(
adapter, selected_uids
)
elif adapter.supports_with_cost(Capability.TableLastModifiedMetadataBatch) and not self.args.force_batched:
fire_event(
Note(
                msg=f"The {adapter.type()} adapter supports batched metadata freshness checks, but the `--force-batched` flag is required to leverage it due to cost considerations. Using non-batched metadata freshness checks instead.\nPlease visit https://docs.getdbt.com/docs/build/sources#batch-metadata-freshness-checks for adapter-specific information."
),
EventLevel.INFO,
)
if (
before_run_status == RunStatus.Success

View File

@@ -81,6 +81,7 @@ dependencies = [
"dbt-semantic-interfaces>=0.9.0,<0.10",
# Minor versions for these are expected to be backwards-compatible
"dbt-common>=1.27.0,<2.0",
# TODO: bump before merging
"dbt-adapters>=1.15.5,<2.0",
"dbt-protos>=1.0.397,<2.0",
"pydantic<3",

View File

@@ -1,4 +1,4 @@
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters
git+https://github.com/dbt-labs/dbt-adapters.git@bq-batch-metadata-source-freshness#subdirectory=dbt-adapters
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter
git+https://github.com/dbt-labs/dbt-common.git@main
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres