Improve error message for not null constraints violated #532

Merged 2 commits on Oct 31, 2023
src/databricks/labs/ucx/framework/crawlers.py (14 additions, 3 deletions)

@@ -45,14 +45,25 @@ def _schema_for(cls, klass):
         fields.append(f"{f.name} {spark_type}{not_null}")
         return ", ".join(fields)
 
+from dataclasses import asdict, dataclass, fields
+
     @classmethod
-    def _filter_none_rows(cls, rows):
+    def _filter_none_rows(cls, rows, klass):
         if len(rows) == 0:
             return rows
 
         results = []
+        class_fields = dataclasses.fields(klass)
         for row in rows:
             if row is None:
                 continue
+            for field in class_fields:
+                if not hasattr(row, field.name):
+                    logger.debug(f"Field {field.name} not present in row {dataclasses.asdict(row)}")
+                    continue
+                if field.default is not None and getattr(row, field.name) is None:
+                    msg = f"Not null constraint violated for column {field.name}, row = {dataclasses.asdict(row)}"
+                    raise ValueError(msg)
             results.append(row)
         return results

@@ -75,7 +86,7 @@ def save_table(self, full_name: str, rows: list[any], klass: dataclasses.dataclass, mode: str = "append"):
         if mode == "overwrite":
             msg = "Overwrite mode is not yet supported"
             raise NotImplementedError(msg)
-        rows = self._filter_none_rows(rows)
+        rows = self._filter_none_rows(rows, klass)
         self.create_table(full_name, klass)
         if len(rows) == 0:
             return
@@ -126,7 +137,7 @@ def fetch(self, sql) -> Iterator[any]:
         return self._spark.sql(sql).collect()
 
     def save_table(self, full_name: str, rows: list[any], klass: dataclasses.dataclass, mode: str = "append"):
-        rows = self._filter_none_rows(rows)
+        rows = self._filter_none_rows(rows, klass)
 
         if len(rows) == 0:
            self.create_table(full_name, klass)
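
For context, a minimal standalone sketch of the rule the new _filter_none_rows enforces (illustrative only; Pipeline and check_not_null are hypothetical names, not part of this PR): a dataclass field with no default, or with a non-None default, is treated as NOT NULL, and a None value in such a field now raises a ValueError naming the column and dumping the offending row.

import dataclasses
from dataclasses import dataclass

@dataclass
class Pipeline:
    pipeline_id: str            # no default: treated as NOT NULL
    pipeline_name: str = None   # None default: nullable

def check_not_null(row):
    # Mirrors the check added above: a field whose default is not None
    # (dataclasses.MISSING for "no default" also satisfies this) must
    # not hold a None value.
    for field in dataclasses.fields(type(row)):
        if field.default is not None and getattr(row, field.name) is None:
            msg = f"Not null constraint violated for column {field.name}, row = {dataclasses.asdict(row)}"
            raise ValueError(msg)

check_not_null(Pipeline("p1", None))  # ok: pipeline_name is nullable
try:
    check_not_null(Pipeline(None, "n"))
except ValueError as e:
    print(e)  # Not null constraint violated for column pipeline_id, row = {'pipeline_id': None, 'pipeline_name': 'n'}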

tests/unit/framework/test_crawlers.py (24 additions, 9 deletions)

@@ -1,6 +1,7 @@
 import os
 import sys
+from dataclasses import dataclass
 from unittest import mock
 
 import pytest
 from databricks.sdk.service import sql
@@ -163,8 +164,6 @@ def test_statement_execution_backend_save_table_in_batches_of_two(mocker):
 
 
 def test_runtime_backend_execute(mocker):
-    from unittest import mock
-
     with mock.patch.dict(os.environ, {"DATABRICKS_RUNTIME_VERSION": "14.0"}):
         pyspark_sql_session = mocker.Mock()
         sys.modules["pyspark.sql.session"] = pyspark_sql_session
@@ -177,8 +176,6 @@ def test_runtime_backend_fetch(mocker):
 
 
 def test_runtime_backend_fetch(mocker):
-    from unittest import mock
-
     with mock.patch.dict(os.environ, {"DATABRICKS_RUNTIME_VERSION": "14.0"}):
         pyspark_sql_session = mocker.Mock()
         sys.modules["pyspark.sql.session"] = pyspark_sql_session
@@ -194,8 +191,6 @@ def test_runtime_backend_save_table(mocker):
 
 
 def test_runtime_backend_save_table(mocker):
-    from unittest import mock
-
     with mock.patch.dict(os.environ, {"DATABRICKS_RUNTIME_VERSION": "14.0"}):
         pyspark_sql_session = mocker.Mock()
         sys.modules["pyspark.sql.session"] = pyspark_sql_session
@@ -212,18 +207,38 @@
 
 
 def test_runtime_backend_save_table_with_row_containing_none_with_nullable_class(mocker):
-    from unittest import mock
-
     with mock.patch.dict(os.environ, {"DATABRICKS_RUNTIME_VERSION": "14.0"}):
         pyspark_sql_session = mocker.Mock()
         sys.modules["pyspark.sql.session"] = pyspark_sql_session
 
         rb = RuntimeBackend()
 
-        rb.save_table("a.b.c", [Baz("aaa", "ccc"), Baz("bbb", None)], Bar)
+        rb.save_table("a.b.c", [Baz("aaa", "ccc"), Baz("bbb", None)], Baz)
 
         rb._spark.createDataFrame.assert_called_with(
             [Baz(first="aaa", second="ccc"), Baz(first="bbb", second=None)],
             "first STRING NOT NULL, second STRING",
         )
         rb._spark.createDataFrame().write.saveAsTable.assert_called_with("a.b.c", mode="append")
+
+
+def test_save_table_with_not_null_constraint_violated(mocker):
+    @dataclass
+    class TestClass:
+        key: str
+        value: str = None
+
+    rows = [TestClass("1", "test"), TestClass("2", None), TestClass(None, "value")]
+
+    with mock.patch.dict(os.environ, {"DATABRICKS_RUNTIME_VERSION": "14.0"}):
+        pyspark_sql_session = mocker.Mock()
+        sys.modules["pyspark.sql.session"] = pyspark_sql_session
+
+        rb = RuntimeBackend()
+
+        with pytest.raises(Exception) as exc_info:
+            rb.save_table("a.b.c", rows, TestClass)
+
+        assert (
+            str(exc_info.value) == "Not null constraint violated for column key, row = {'key': None, 'value': 'value'}"
+        )
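
The schema assertion above ("first STRING NOT NULL, second STRING") follows the same convention _schema_for applies when creating tables: a field with no default maps to a NOT NULL column, while a field defaulting to None is nullable. A simplified sketch of that mapping, assuming STRING columns only (the real method also translates Python types to Spark SQL types):

import dataclasses
from dataclasses import dataclass

def schema_for(klass):
    # Render one column per dataclass field; only fields that default
    # to None are nullable.
    columns = []
    for f in dataclasses.fields(klass):
        not_null = "" if f.default is None else " NOT NULL"
        columns.append(f"{f.name} STRING{not_null}")
    return ", ".join(columns)

@dataclass
class Baz:
    first: str
    second: str = None

print(schema_for(Baz))  # first STRING NOT NULL, second STRING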