diff --git a/docs/docs-beta/docs/guides/quality-testing/data-freshness-testing.md b/docs/docs-beta/docs/guides/quality-testing/data-freshness-testing.md
index 83f664b46ec6d..a391dfc0baf26 100644
--- a/docs/docs-beta/docs/guides/quality-testing/data-freshness-testing.md
+++ b/docs/docs-beta/docs/guides/quality-testing/data-freshness-testing.md
@@ -1,4 +1,67 @@
 ---
 title: "Test for data freshness"
 sidebar_position: 20
----
\ No newline at end of file
+---
+Freshness checks provide a way to identify data assets that are overdue for an update.
+
+This guide covers how to construct freshness checks for materializable [assets](/todo) and [external assets](/todo).
+
+<details>
+  <summary>Prerequisites</summary>
+
+To follow the steps in this guide, you'll need:
+
+- Familiarity with [assets](/todo)
+- Familiarity with [asset checks](/todo)
+
+</details>
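+
+The pattern at a glance: pair one or more assets with a maximum acceptable staleness, then make sure a sensor or schedule periodically evaluates the resulting check. Below is a minimal sketch of that pattern, using a hypothetical `my_asset` for illustration; the sections that follow show complete versions for each asset type.
+
+```python
+from datetime import timedelta
+
+import dagster as dg
+
+
+# A hypothetical asset, used only for illustration.
+@dg.asset
+def my_asset(): ...
+
+
+# The check fails if my_asset's latest update is more than one hour old.
+freshness_checks = dg.build_last_update_freshness_checks(
+    assets=[my_asset], lower_bound_delta=timedelta(hours=1)
+)
+
+# A sensor (or schedule) is required to actually evaluate the check.
+freshness_checks_sensor = dg.build_sensor_for_freshness_checks(
+    freshness_checks=freshness_checks
+)
+```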
+
+## Test data freshness for materializable assets
+
+The example below defines a freshness check on an asset that fails if the asset's latest materialization occurred more than one hour before the current time.
+
+Defining a schedule or sensor is required to ensure the freshness check executes: if the check only ran after the asset materialized, it would never detect the cases where a materialization fails to happen.
+
+<CodeExample filePath="guides/data-assets/quality-testing/freshness-checks/materializable-asset-freshness-check.py" language="python" />
+
+## Test data freshness for external assets
+
+To run freshness checks on external assets, the checks need to know when the external assets were last updated. Emitting these update timestamps in observation metadata allows Dagster to calculate whether the asset is overdue.
+
+The example below defines a freshness check and a schedule that periodically observes the asset, so the check always has a recent timestamp to evaluate.
+
+<CodeExample filePath="guides/data-assets/quality-testing/freshness-checks/external-asset-freshness-check.py" language="python" />
+
+### Use anomaly detection to test data freshness (Dagster+ Pro)
+
+Instead of defining freshness policies on an asset-by-asset basis, Dagster+ Pro users can take advantage of a time series anomaly detection model to determine whether data is arriving later than expected.
+
+<CodeExample filePath="guides/data-assets/quality-testing/freshness-checks/anomaly-detection.py" language="python" />
+
+## Next steps
+
+- Explore more [asset checks](/todo)
diff --git a/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/anomaly-detection.py b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/anomaly-detection.py
new file mode 100644
index 0000000000000..fd95e7ce75f16
--- /dev/null
+++ b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/anomaly-detection.py
@@ -0,0 +1,7 @@
+from dagster_cloud.anomaly_detection import build_anomaly_detection_freshness_checks
+
+hourly_sales = ...
+
+freshness_checks = build_anomaly_detection_freshness_checks(
+    assets=[hourly_sales], params=None
+)
diff --git a/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/external-asset-freshness-check.py b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/external-asset-freshness-check.py
new file mode 100644
index 0000000000000..cfe78fe3a55e4
--- /dev/null
+++ b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/external-asset-freshness-check.py
@@ -0,0 +1,57 @@
+from datetime import timedelta
+
+import dagster_snowflake as dg_snowflake
+
+import dagster as dg
+
+
+@dg.multi_observable_source_asset(specs=[dg.AssetSpec("hourly_sales")])
+def hourly_sales(snowflake: dg_snowflake.SnowflakeResource):
+    with snowflake.get_connection() as conn:
+        freshness_results = dg_snowflake.fetch_last_updated_timestamps(
+            snowflake_connection=conn.cursor(),
+            tables=["hourly_sales"],
+            schema="PUBLIC",
+        )
+        for table_name, last_updated in freshness_results.items():
+            yield dg.ObserveResult(
+                asset_key=table_name,
+                metadata={
+                    "dagster/last_updated_timestamp": dg.MetadataValue.timestamp(
+                        last_updated
+                    )
+                },
+            )
+
+
+freshness_check_schedule = dg.ScheduleDefinition(
+    job=dg.define_asset_job(
+        "hourly_sales_observation_job",
+        selection=dg.AssetSelection.assets(hourly_sales),
+    ),
+    # Runs every minute. Usually, a much less frequent cadence is necessary,
+    # but a short cadence makes it easier to play around with this example.
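+    # In production, an hourly cron like "0 * * * *" (or whatever matches how
+    # often the table actually updates) is a more realistic choice.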
+    cron_schedule="* * * * *",
+)
+
+
+hourly_sales_freshness_check = dg.build_last_update_freshness_checks(
+    assets=[hourly_sales],
+    lower_bound_delta=timedelta(hours=1),
+)
+
+
+defs = dg.Definitions(
+    assets=[hourly_sales],
+    asset_checks=hourly_sales_freshness_check,
+    schedules=[freshness_check_schedule],
+    resources={
+        "snowflake": dg_snowflake.SnowflakeResource(
+            user=dg.EnvVar("SNOWFLAKE_USER"),
+            account=dg.EnvVar("SNOWFLAKE_ACCOUNT"),
+            password=dg.EnvVar("SNOWFLAKE_PASSWORD"),
+        )
+    },
+)
diff --git a/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/materializable-asset-freshness-check.py b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/materializable-asset-freshness-check.py
new file mode 100644
index 0000000000000..ade52d46aaaaf
--- /dev/null
+++ b/examples/docs_beta_snippets/docs_beta_snippets/guides/data-assets/quality-testing/freshness-checks/materializable-asset-freshness-check.py
@@ -0,0 +1,22 @@
+from datetime import timedelta
+
+import dagster as dg
+
+
+@dg.asset
+def hourly_sales(context: dg.AssetExecutionContext):
+    context.log.info("Fetching and emitting hourly sales data")
+    ...
+
+
+hourly_sales_freshness_check = dg.build_last_update_freshness_checks(
+    assets=[hourly_sales], lower_bound_delta=timedelta(hours=1)
+)
+freshness_checks_sensor = dg.build_sensor_for_freshness_checks(
+    freshness_checks=hourly_sales_freshness_check
+)
+defs = dg.Definitions(
+    assets=[hourly_sales],
+    asset_checks=hourly_sales_freshness_check,
+    sensors=[freshness_checks_sensor],
+)
diff --git a/examples/experimental/dagster-airlift/examples/dbt-example/dbt_example/airflow_dags/dags.py b/examples/experimental/dagster-airlift/examples/dbt-example/dbt_example/airflow_dags/dags.py
index 1f4454dded617..208c76bcbe2d8 100644
--- a/examples/experimental/dagster-airlift/examples/dbt-example/dbt_example/airflow_dags/dags.py
+++ b/examples/experimental/dagster-airlift/examples/dbt-example/dbt_example/airflow_dags/dags.py
@@ -8,6 +8,7 @@
 from airflow.operators.bash import BashOperator
 from dagster_airlift.in_airflow import mark_as_dagster_migrating
 from dagster_airlift.migration_state import load_migration_state_from_yaml
+
 from dbt_example.shared.lakehouse_utils import load_csv_to_duckdb
 from dbt_example.shared.load_iris import CSV_PATH, DB_PATH, IRIS_COLUMNS