Generate Bigeye monitoring configs in CI (#6194)
* Generate Bigeye monitoring configs in CI

* Ensure Bigconfig files are loaded exactly once

* Avoid duplicate validation of Bigconfig files

* Authentication using API key to Bigeye

* Remove api-key option for Bigeye and rely on env var instead
scholtzan committed Sep 17, 2024
1 parent 1cbc8a5 commit 13349e8
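In practice, the API-key bullets above mean the Bigeye API key is now read from the BIGEYE_API_KEY environment variable instead of being passed as an --api-key option. A minimal local sketch (the key value, virtualenv name, and table name are placeholders, not part of this commit):

    # Hypothetical local invocation; deploy exits early if BIGEYE_API_KEY is unset.
    export BIGEYE_API_KEY="<bigeye-api-key>"
    PATH="venv/bin:$PATH" script/bqetl monitoring deploy telemetry_derived.some_table_v1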
Showing 2 changed files with 50 additions and 13 deletions.
2 changes: 2 additions & 0 deletions .circleci/workflows.yml
@@ -570,6 +570,7 @@ jobs:
--sql-dir /tmp/workspace/generated-sql/sql/ \
/tmp/workspace/generated-sql/sql/
PATH="venv/bin:$PATH" script/bqetl format /tmp/workspace/generated-sql/sql/
PATH="venv/bin:$PATH" script/bqetl monitoring update /tmp/workspace/generated-sql/sql/
- persist_to_workspace:
root: /tmp/workspace
paths:
@@ -785,6 +786,7 @@ jobs:
--sql-dir /tmp/workspace/private-generated-sql/sql/ \
/tmp/workspace/private-generated-sql/sql/
PATH="venv/bin:$PATH" script/bqetl format /tmp/workspace/private-generated-sql/sql/
PATH="venv/bin:$PATH" script/bqetl monitoring update /tmp/workspace/private-generated-sql/sql/
# Change directory to generate DAGs so `sql_file_path` values are relative to the repo root.
export PATH="$PWD/venv/bin:$PATH"
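Outside CI, the same generation step can presumably be run against a locally generated SQL directory; a rough sketch, assuming a venv virtualenv and a local generated-sql output (the path is illustrative):

    # Regenerate Bigeye monitoring (BigConfig) files for tables that enable monitoring
    # in their metadata.yaml, mirroring the CI step added above.
    PATH="venv/bin:$PATH" script/bqetl monitoring update /tmp/generated-sql/sql/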
61 changes: 48 additions & 13 deletions bigquery_etl/cli/monitoring.py
@@ -1,13 +1,15 @@
"""bigquery-etl CLI monitoring command."""

+import os
import sys
from pathlib import Path
from typing import Optional

import click
-from bigeye_sdk.authentication.api_authentication import ApiAuth
+from bigeye_sdk.authentication.api_authentication import APIKeyAuth
from bigeye_sdk.client.datawatch_client import datawatch_client_factory
from bigeye_sdk.controller.metric_suite_controller import MetricSuiteController
+from bigeye_sdk.exceptions.exceptions import FileLoadException
from bigeye_sdk.model.big_config import BigConfig, TableDeployment, TableDeploymentSuite
from bigeye_sdk.model.protobuf_message_facade import (
SimpleMetricDefinition,
@@ -40,39 +42,66 @@ def monitoring(ctx):
help="""
Deploy monitors defined in the BigConfig files to Bigeye.
-Requires BigConfig credentials to be set via BIGEYE_API_CRED_FILE env variable.
+Requires BigConfig API key to be set via BIGEYE_API_KEY env variable.
"""
)
@click.argument("name")
@project_id_option()
@sql_dir_option
@click.option(
"--workspace",
default="DEFAULT",
default=463,
help="Bigeye workspace to use when authenticating to API.",
)
+@click.option(
+"--base-url",
+"--base_url",
+default="https://app.bigeye.com",
+help="Bigeye base URL.",
+)
@click.pass_context
def deploy(
-ctx, name: str, sql_dir: Optional[str], project_id: Optional[str], workspace: str
+ctx,
+name: str,
+sql_dir: Optional[str],
+project_id: Optional[str],
+workspace: str,
+base_url: str,
) -> None:
"""Deploy Bigeye config."""
+api_key = os.environ.get("BIGEYE_API_KEY")
+if api_key is None:
+click.echo(
+"Bigeye API token needs to be set via `BIGEYE_API_KEY` env variable."
+)
+sys.exit(1)

"""Deploy monitors to Bigeye."""
metadata_files = paths_matching_name_pattern(
name, sql_dir, project_id=project_id, files=["metadata.yaml"]
)

-for metadata_file in metadata_files:
+for metadata_file in list(set(metadata_files)):
project, dataset, table = extract_from_query_path(metadata_file)
try:
metadata = Metadata.from_file(metadata_file)
if metadata.monitoring and metadata.monitoring.enabled:
-ctx.invoke(update, name=name, sql_dir=sql_dir, project_id=project_id)
-api_auth = ApiAuth.load_from_ini_file(
-auth_file=ApiAuth.find_user_credentials(None), workspace=workspace
-) # load user credentials from BIGEYE_API_CONFIG_FILE env variable
-client = datawatch_client_factory(api_auth, workspace_id=463)
+ctx.invoke(
+update,
+name=metadata_file.parent,
+sql_dir=sql_dir,
+project_id=project_id,
+)
+api_auth = APIKeyAuth(base_url=base_url, api_key=api_key)
+client = datawatch_client_factory(api_auth, workspace_id=workspace)
mc = MetricSuiteController(client=client)

-ctx.invoke(validate, name=name, sql_dir=sql_dir, project_id=project_id)
+ctx.invoke(
+validate,
+name=metadata_file.parent,
+sql_dir=sql_dir,
+project_id=project_id,
+)
mc.execute_bigconfig(
input_path=[metadata_file.parent / BIGCONFIG_FILE],
output_path=Path(sql_dir).parent if sql_dir else None,
@@ -102,7 +131,7 @@ def update(name: str, sql_dir: Optional[str], project_id: Optional[str]) -> None
name, sql_dir, project_id=project_id, files=["metadata.yaml"]
)

-for metadata_file in metadata_files:
+for metadata_file in list(set(metadata_files)):
project, dataset, table = extract_from_query_path(metadata_file)
try:
metadata = Metadata.from_file(metadata_file)
@@ -175,9 +204,15 @@ def validate(name: str, sql_dir: Optional[str], project_id: Optional[str]) -> None

invalid = False

-for bigconfig_file in bigconfig_files:
+for bigconfig_file in list(set(bigconfig_files)):
try:
BigConfig.load(bigconfig_file)
+except FileLoadException as e:
+if "Duplicate" in e.message:
+pass
+else:
+click.echo(f"Invalid BigConfig file {bigconfig_file}: {e}")
+invalid = True
except Exception as e:
click.echo(f"Invalid BigConfig file {bigconfig_file}: {e}")
invalid = True
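With the FileLoadException handling above, duplicate-load errors no longer fail validation, so the generated BigConfig files can presumably be checked on their own before deploying; a rough sketch (the table name pattern is a placeholder):

    # Validate generated BigConfig files without deploying; duplicate-load
    # errors from Bigconfig files that were already loaded are skipped.
    PATH="venv/bin:$PATH" script/bqetl monitoring validate telemetry_derived.some_table_v1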

1 comment on commit 13349e8

@dataops-ci-bot

Integration report for "Generate Bigeye monitoring configs in CI (#6194)"

sql.diff

No content detected.
