
[BUGFIX] Batch Expectations correctly handle date min and max values #10613

Merged: 5 commits merged on Nov 1, 2024
Changes from 4 commits
6 changes: 6 additions & 0 deletions great_expectations/compatibility/sqlalchemy.py
@@ -277,3 +277,9 @@
__version__: str | None = sqlalchemy.__version__
except (ImportError, AttributeError):
__version__ = None


try:
from sqlalchemy.sql import sqltypes
except (ImportError, AttributeError):
sqltypes = SQLALCHEMY_NOT_IMPORTED # type: ignore[assignment]
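For readers unfamiliar with the compatibility layer, here is a minimal standalone sketch of the guarded-import pattern these added lines follow. The sentinel class below is hypothetical, standing in for the one Great Expectations defines in its own compatibility package:

# Hypothetical sentinel illustrating the guarded-import pattern; the real
# sentinel lives in Great Expectations' compatibility package.
class _NotImported:
    def __init__(self, message: str) -> None:
        self._message = message

    def __getattr__(self, name: str):
        # Importing the compatibility module never fails; an error only
        # surfaces when the missing dependency is actually used.
        raise ModuleNotFoundError(self._message)


SQLTYPES_NOT_IMPORTED = _NotImported("sqlalchemy is not installed")

try:
    from sqlalchemy.sql import sqltypes
except (ImportError, AttributeError):
    sqltypes = SQLTYPES_NOT_IMPORTED  # type: ignore[assignment]

# Callers can then write sqltypes.DATE unconditionally.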

10 changes: 10 additions & 0 deletions great_expectations/expectations/expectation.py
@@ -1656,6 +1656,16 @@
representation.""" # noqa: E501
)

if isinstance(min_value, datetime.date) or isinstance(max_value, datetime.date):
if not isinstance(metric_value, datetime.date):
try:
metric_value = parse(metric_value).date()
except TypeError:
raise ValueError( # noqa: TRY003
f"""Could not parse "metric_value" of {metric_value} (of type "{type(metric_value)!s}) into datetime \
representation.""" # noqa: E501
)

# Checking if mean lies between thresholds
if min_value is not None:
if strict_min:
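To make the intent of the new branch in expectation.py concrete, here is a small standalone sketch of the scenario it guards against (assumed here: a SQL backend returns the column min/max as an ISO-8601 string while the expectation was configured with date bounds). This is an illustration, not code from this PR:

import datetime

from dateutil.parser import parse

min_value = datetime.date(2021, 1, 1)
metric_value = "2021-01-31"  # value as handed back by the backend

# Comparing a str against a date raises TypeError, so normalize first.
if not isinstance(metric_value, datetime.date):
    metric_value = parse(metric_value).date()

assert min_value <= metric_value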
@@ -1,8 +1,10 @@
from datetime import datetime, timezone

import pandas as pd
import sqlalchemy.dialects.postgresql as POSTGRESQL_TYPES
Member Author: we shouldn't import directly from sqlalchemy

Contributor: Note to non-Josh folks: the postgresql types were just imported from the sqla types, so they are equivalent. Which explains mypy being fine with this change.
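
A quick standalone check of that claim (assuming, as the comment states, that SQLAlchemy's postgres dialect simply re-exports these classes from sqlalchemy.sql.sqltypes):

import sqlalchemy.dialects.postgresql as POSTGRESQL_TYPES
from sqlalchemy.sql import sqltypes

# The dialect module re-exports the core type classes, so both names refer
# to the same objects and the swap in this diff does not change column types.
assert POSTGRESQL_TYPES.INTEGER is sqltypes.INTEGER
assert POSTGRESQL_TYPES.VARCHAR is sqltypes.VARCHAR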


import great_expectations.expectations as gxe
from great_expectations.compatibility.snowflake import SNOWFLAKE_TYPES
from great_expectations.compatibility.sqlalchemy import sqltypes
from tests.integration.conftest import parameterize_batch_for_data_sources
from tests.integration.test_utils.data_source_config import (
PandasDataFrameDatasourceTestConfig,
@@ -16,7 +18,7 @@
data_source_configs=[
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
PostgreSQLDatasourceTestConfig(column_types={"a": POSTGRESQL_TYPES.INTEGER}),
PostgreSQLDatasourceTestConfig(column_types={"a": sqltypes.INTEGER}),
SnowflakeDatasourceTestConfig(column_types={"a": SNOWFLAKE_TYPES.NUMBER}),
],
data=pd.DataFrame({"a": [1, 2]}),
@@ -27,12 +29,66 @@ def test_expect_column_min_to_be_between(batch_for_datasource) -> None:
assert result.success


@parameterize_batch_for_data_sources(
data_source_configs=[
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
PostgreSQLDatasourceTestConfig(column_types={"date": sqltypes.DATE}),
SnowflakeDatasourceTestConfig(column_types={"date": sqltypes.DATE}),
],
data=pd.DataFrame(
{
"date": [
datetime(year=2021, month=1, day=31, tzinfo=timezone.utc).date(),
datetime(year=2022, month=1, day=31, tzinfo=timezone.utc).date(),
datetime(year=2023, month=1, day=31, tzinfo=timezone.utc).date(),
]
}
),
)
def test_expect_column_min_to_be_between__date(batch_for_datasource) -> None:
expectation = gxe.ExpectColumnMinToBeBetween(
column="date",
min_value=datetime(year=2021, month=1, day=1, tzinfo=timezone.utc).date(),
max_value=datetime(year=2022, month=1, day=1, tzinfo=timezone.utc).date(),
)
result = batch_for_datasource.validate(expectation)
assert result.success


@parameterize_batch_for_data_sources(
data_source_configs=[
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
PostgreSQLDatasourceTestConfig(column_types={"date": sqltypes.DATE}),
SnowflakeDatasourceTestConfig(column_types={"date": sqltypes.DATE}),
],
data=pd.DataFrame(
{
"date": [
datetime(year=2021, month=1, day=31, tzinfo=timezone.utc).date(),
datetime(year=2022, month=1, day=31, tzinfo=timezone.utc).date(),
datetime(year=2023, month=1, day=31, tzinfo=timezone.utc).date(),
]
}
),
)
def test_expect_column_max_to_be_between__date(batch_for_datasource) -> None:
expectation = gxe.ExpectColumnMaxToBeBetween(
column="date",
min_value=datetime(year=2023, month=1, day=1, tzinfo=timezone.utc).date(),
max_value=datetime(year=2024, month=1, day=1, tzinfo=timezone.utc).date(),
)
result = batch_for_datasource.validate(expectation)
assert result.success


@parameterize_batch_for_data_sources(
data_source_configs=[
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
SnowflakeDatasourceTestConfig(column_types={"a": SNOWFLAKE_TYPES.NUMBER}),
PostgreSQLDatasourceTestConfig(column_types={"a": POSTGRESQL_TYPES.INTEGER}),
PostgreSQLDatasourceTestConfig(column_types={"a": sqltypes.INTEGER}),
],
data=pd.DataFrame({"a": [1, 2]}),
)
@@ -47,7 +103,7 @@ def test_expect_column_max_to_be_between(batch_for_datasource) -> None:
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
SnowflakeDatasourceTestConfig(column_types={"a": SNOWFLAKE_TYPES.NUMBER}),
PostgreSQLDatasourceTestConfig(column_types={"a": POSTGRESQL_TYPES.INTEGER}),
PostgreSQLDatasourceTestConfig(column_types={"a": sqltypes.INTEGER}),
],
data=pd.DataFrame({"a": [1, 2]}),
)
@@ -62,7 +118,7 @@ def test_expect_column_to_exist(batch_for_datasource):
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
SnowflakeDatasourceTestConfig(column_types={"a": SNOWFLAKE_TYPES.NUMBER}),
PostgreSQLDatasourceTestConfig(column_types={"a": POSTGRESQL_TYPES.INTEGER}),
PostgreSQLDatasourceTestConfig(column_types={"a": sqltypes.INTEGER}),
],
data=pd.DataFrame({"a": [1, 2]}),
)
@@ -77,7 +133,7 @@ def test_expect_column_values_to_not_be_null(batch_for_datasource):
PandasDataFrameDatasourceTestConfig(),
PandasFilesystemCsvDatasourceTestConfig(),
SnowflakeDatasourceTestConfig(column_types={"a": SNOWFLAKE_TYPES.NUMBER}),
PostgreSQLDatasourceTestConfig(column_types={"a": POSTGRESQL_TYPES.INTEGER}),
PostgreSQLDatasourceTestConfig(column_types={"a": sqltypes.INTEGER}),
],
data=pd.DataFrame({"a": [1, 2, 3, 4]}),
)
@@ -91,8 +147,8 @@ class TestExpectTableRowCountToEqualOtherTable:
@parameterize_batch_for_data_sources(
data_source_configs=[
PostgreSQLDatasourceTestConfig(
column_types={"col_a": POSTGRESQL_TYPES.INTEGER},
extra_assets={"test_table_two": {"col_b": POSTGRESQL_TYPES.VARCHAR}},
column_types={"col_a": sqltypes.INTEGER},
extra_assets={"test_table_two": {"col_b": sqltypes.VARCHAR}},
),
],
data=pd.DataFrame({"a": [1, 2, 3, 4]}),
@@ -106,8 +162,8 @@ def test_success(self, batch_for_datasource):
@parameterize_batch_for_data_sources(
data_source_configs=[
PostgreSQLDatasourceTestConfig(
column_types={"col_a": POSTGRESQL_TYPES.INTEGER},
extra_assets={"test_table_two": {"col_b": POSTGRESQL_TYPES.VARCHAR}},
column_types={"col_a": sqltypes.INTEGER},
extra_assets={"test_table_two": {"col_b": sqltypes.VARCHAR}},
),
],
data=pd.DataFrame({"a": [1, 2, 3, 4]}),
@@ -125,7 +181,7 @@ def test_different_counts(self, batch_for_datasource):
@parameterize_batch_for_data_sources(
data_source_configs=[
PostgreSQLDatasourceTestConfig(
column_types={"col_a": POSTGRESQL_TYPES.INTEGER},
column_types={"col_a": sqltypes.INTEGER},
),
],
data=pd.DataFrame({"a": [1, 2, 3, 4]}),