Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(dbt): move logic to a function #258

Merged
merged 2 commits into from
Jan 10, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 39 additions & 28 deletions src/preset_cli/cli/superset/sync/dbt/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import sys
import warnings
from pathlib import Path
from typing import List, Optional, Tuple
from typing import Dict, List, Optional, Tuple

import click
import yaml
Expand Down Expand Up @@ -335,6 +335,43 @@ def get_job(
raise ValueError(f"Job {job_id} not available")


def process_sl_metrics(
    dbt_client: DBTClient,
    environment_id: int,
    model_map: Dict[ModelKey, ModelSchema],
) -> List[MFMetricWithSQLSchema]:
    """
    Fetch MetricFlow metrics from the dbt Semantic Layer and map them to models.

    A metric is skipped when no compiled SQL is available for it, or when its
    SQL references more than one model (``MultipleModelsError``), since it
    cannot be associated with a single model in that case.

    :param dbt_client: client used to query the dbt Semantic Layer.
    :param environment_id: dbt Cloud environment to fetch metrics from.
    :param model_map: lookup from (schema, name) key to the model schema,
        used to resolve which model a metric's SQL reads from.
    :return: metrics that could be mapped to exactly one model. Always a
        list — possibly empty, never ``None``.
    """
    # Dialect is per-environment; fetch it once outside the loop.
    dialect = dbt_client.get_sl_dialect(environment_id)
    mf_metric_schema = MFMetricWithSQLSchema()
    sl_metrics: List[MFMetricWithSQLSchema] = []
    for metric in dbt_client.get_sl_metrics(environment_id):
        sql = dbt_client.get_sl_metric_sql(metric["name"], environment_id)
        if sql is None:
            # No compiled SQL for this metric; nothing to sync.
            continue

        try:
            model = get_model_from_sql(sql, dialect, model_map)
        except MultipleModelsError:
            # Metric spans multiple models; we can't attach it to one.
            continue

        sl_metrics.append(
            mf_metric_schema.load(
                {
                    "name": metric["name"],
                    "type": metric["type"],
                    "description": metric["description"],
                    "sql": sql,
                    "dialect": dialect.value,
                    "model": model["unique_id"],
                },
            ),
        )

    return sl_metrics


@click.command()
@click.argument("token")
@click.argument("job_id", type=click.INT, required=False, default=None)
Expand Down Expand Up @@ -448,34 +485,8 @@ def dbt_cloud( # pylint: disable=too-many-arguments, too-many-locals
models = apply_select(models, select, exclude)
model_map = {ModelKey(model["schema"], model["name"]): model for model in models}

# original dbt <= 1.6 metrics
og_metrics = dbt_client.get_og_metrics(job["id"])

# MetricFlow metrics
dialect = dbt_client.get_sl_dialect(job["environment_id"])
mf_metric_schema = MFMetricWithSQLSchema()
sl_metrics: List[MFMetricWithSQLSchema] = []
for metric in dbt_client.get_sl_metrics(job["environment_id"]):
sql = dbt_client.get_sl_metric_sql(metric["name"], job["environment_id"])
if sql is not None:
try:
model = get_model_from_sql(sql, dialect, model_map)
except MultipleModelsError:
continue

sl_metrics.append(
mf_metric_schema.load(
{
"name": metric["name"],
"type": metric["type"],
"description": metric["description"],
"sql": sql,
"dialect": dialect.value,
"model": model["unique_id"],
},
),
)

sl_metrics = process_sl_metrics(dbt_client, job["environment_id"], model_map)
superset_metrics = get_superset_metrics_per_model(og_metrics, sl_metrics)

if exposures_only:
Expand Down
Loading