
Commit

cloudwatch message reader docs
danielgafni committed Aug 7, 2024
1 parent af7fce7 commit 0c307aa
Showing 9 changed files with 58 additions and 19 deletions.
Binary file modified docs/content/api/modules.json.gz
Binary file modified docs/content/api/searchindex.json.gz
Binary file modified docs/content/api/sections.json.gz
17 changes: 9 additions & 8 deletions docs/content/concepts/dagster-pipes/aws-glue.mdx
@@ -8,7 +8,6 @@ description: "Learn to integrate Dagster Pipes with AWS Glue to launch external
This tutorial gives a short overview on how to use [Dagster Pipes](/concepts/dagster-pipes) with [AWS Glue](https://aws.amazon.com/glue/).

The [dagster-aws](/\_apidocs/libraries/dagster-aws) integration library provides the <PyObject object="PipesGlueClient" module="dagster_aws.pipes" /> resource which can be used to launch AWS Glue jobs from Dagster assets and ops. Dagster can receive regular events like logs, asset checks, or asset materializations from jobs launched with this client. Using it requires minimal code changes on the job side.

---

## Prerequisites
@@ -46,24 +45,26 @@ Call `open_dagster_pipes` in the Glue job script to create a context that can be
import boto3
from dagster_pipes import (
    PipesCliArgsParamsLoader,
    PipesDefaultMessageWriter,
    PipesS3ContextLoader,
    PipesS3MessageWriter,
    open_dagster_pipes,
)

client = boto3.client("s3")
context_loader = PipesS3ContextLoader(client)
message_writer = PipesS3MessageWriter(client)
params_loader = PipesCliArgsParamsLoader()


def main():
    with open_dagster_pipes(
        context_loader=context_loader,
        message_writer=message_writer,
        params_loader=params_loader,
    ) as pipes:
        pipes.log.info("Hello from AWS Glue job!")
        pipes.report_asset_materialization(
            metadata={"some_metric": {"raw_value": 0, "type": "int"}},
            data_version="alpha",
        )


if __name__ == "__main__":
@@ -107,7 +108,7 @@ Next, add the `PipesGlueClient` resource to your project's <PyObject object="Def

```python file=/guides/dagster/dagster_pipes/glue/dagster_code.py startafter=start_definitions_marker endbefore=end_definitions_marker
from dagster import Definitions # noqa
from dagster_aws.pipes import PipesGlueContextInjector, PipesS3MessageReader
from dagster_aws.pipes import PipesGlueContextInjector


bucket = os.environ["DAGSTER_GLUE_S3_CONTEXT_BUCKET"]
@@ -122,16 +123,16 @@ defs = Definitions(
                client=boto3.client("s3"),
                bucket=bucket,
            ),
            message_reader=PipesS3MessageReader(
                client=boto3.client("s3"), bucket=bucket
            ),
        )
    },
)
```

Dagster will now be able to launch the AWS Glue job from the `glue_pipes_asset` asset.

By default, the client uses the CloudWatch log stream (`.../output/<job-run-id>`) created by the Glue job to receive Dagster events, and forwards this stream to `stdout`. If this is undesirable, the client can instead be configured to use <PyObject object="PipesS3MessageReader" module="dagster_aws.pipes" />, with the Glue job using <PyObject object="PipesS3MessageWriter" module="dagster_pipes" />.

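For example, a minimal sketch of the S3-based configuration on the Dagster side might look like the following (the bucket name and resource variable are placeholders, not part of this change; the Glue job script would keep the `PipesS3MessageWriter` shown earlier):

```python
import boto3
from dagster_aws.pipes import (
    PipesGlueClient,
    PipesGlueContextInjector,
    PipesS3MessageReader,
)

bucket = "my-pipes-bucket"  # placeholder; use your own S3 bucket

glue_client = PipesGlueClient(
    client=boto3.client("glue"),
    context_injector=PipesGlueContextInjector(client=boto3.client("s3"), bucket=bucket),
    # read Pipes messages written to S3 by the Glue job via PipesS3MessageWriter
    message_reader=PipesS3MessageReader(client=boto3.client("s3"), bucket=bucket),
)
```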

---

## Related
Binary file modified docs/next/public/objects.inv
19 changes: 19 additions & 0 deletions docs/sphinx/sections/api/apidocs/libraries/dagster-aws.rst
@@ -98,9 +98,28 @@ Resources which surface SecretsManager secrets for use in Dagster resources and
Pipes
--------------

Context Injectors
^^^^^^^^^^^^^^^^^

.. autoclass:: dagster_aws.pipes.PipesS3ContextInjector

.. autoclass:: dagster_aws.pipes.PipesLambdaEventContextInjector

Message Readers
^^^^^^^^^^^^^^^

.. autoclass:: dagster_aws.pipes.PipesS3MessageReader

.. autoclass:: dagster_aws.pipes.PipesCloudWatchMessageReader
    :members: consume_cloudwatch_logs

Clients
^^^^^^^

.. autoclass:: dagster_aws.pipes.PipesLambdaClient

.. autoclass:: dagster_aws.pipes.PipesGlueClient
    :members: run

Legacy
--------
@@ -24,7 +24,7 @@ def glue_pipes_asset(
# start_definitions_marker

from dagster import Definitions # noqa
from dagster_aws.pipes import PipesGlueContextInjector, PipesS3MessageReader
from dagster_aws.pipes import PipesGlueContextInjector


bucket = os.environ["DAGSTER_GLUE_S3_CONTEXT_BUCKET"]
@@ -39,9 +39,6 @@ defs = Definitions(
                client=boto3.client("s3"),
                bucket=bucket,
            ),
            message_reader=PipesS3MessageReader(
                client=boto3.client("s3"), bucket=bucket
            ),
        )
    },
)
@@ -2,23 +2,24 @@
from dagster_pipes import (
    PipesCliArgsParamsLoader,
    PipesS3ContextLoader,
    PipesS3MessageWriter,
    open_dagster_pipes,
)

client = boto3.client("s3")
context_loader = PipesS3ContextLoader(client)
message_writer = PipesS3MessageWriter(client)
params_loader = PipesCliArgsParamsLoader()


def main():
    with open_dagster_pipes(
        context_loader=context_loader,
        message_writer=message_writer,
        params_loader=params_loader,
    ) as pipes:
        pipes.log.info("Hello from AWS Glue job!")
        pipes.report_asset_materialization(
            metadata={"some_metric": {"raw_value": 0, "type": "int"}},
            data_version="alpha",
        )


if __name__ == "__main__":
29 changes: 25 additions & 4 deletions python_modules/libraries/dagster-aws/dagster_aws/pipes.py
@@ -204,6 +204,19 @@ def consume_cloudwatch_logs(
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
    ) -> None:
        """Reads logs from AWS CloudWatch and forwards them to Dagster for events extraction and logging.

        Args:
            log_group (str): CloudWatch log group name
            log_stream (str): CloudWatch log stream name
            start_time (Optional[int]): The start of the time range, expressed as the number of
                milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp equal to this
                time or later than this time are included.
                Events with a timestamp earlier than this time are not included.
            end_time (Optional[int]): The end of the time range, expressed as the number of
                milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp equal to or
                later than this time are not included.
        """
        handler = check.not_none(
            self._handler, "Can only consume logs within context manager scope."
        )
@@ -216,7 +229,7 @@ def consume_cloudwatch_logs(
                    extract_message_or_forward_to_stdout(handler, log_line)

    def no_messages_debug_text(self) -> str:
        return "Attempted to read messages by extracting them from the tail of CloudWatch logs directly."
        return "Attempted to read messages by extracting them from CloudWatch logs directly."
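    # Illustrative usage sketch (not part of this diff): `consume_cloudwatch_logs` is
    # typically called by a Pipes client inside an open pipes session, after the external
    # run has finished. The log group, stream name, and timestamp below are placeholders.
    #
    #   reader = PipesCloudWatchMessageReader(client=boto3.client("logs"))
    #   ...  # open a pipes session with message_reader=reader and launch the job
    #   reader.consume_cloudwatch_logs(
    #       "/aws-glue/jobs/output",    # log_group
    #       "jr_0123456789abcdef",      # log_stream (e.g. the Glue job run ID)
    #       start_time=int(job_start_timestamp_ms),
    #   )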

    def _get_all_cloudwatch_events(
        self,
@@ -247,6 +260,10 @@ def _get_all_cloudwatch_events(


class PipesLambdaEventContextInjector(PipesEnvContextInjector):
    """Injects context via AWS Lambda event input.

    Should be paired with :py:class:`~dagster_pipes.PipesMappingParamsLoader` on the Lambda side.
    """

    def no_messages_debug_text(self) -> str:
        return "Attempted to inject context via the lambda event input."

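# Illustrative Lambda-side sketch (not part of this diff): pairing the event-based context
# injection with `PipesMappingParamsLoader`. The handler name is arbitrary.
#
#   from dagster_pipes import PipesMappingParamsLoader, open_dagster_pipes
#
#   def lambda_handler(event, _context):
#       with open_dagster_pipes(params_loader=PipesMappingParamsLoader(event)) as pipes:
#           pipes.log.info("Hello from AWS Lambda!")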
@@ -255,7 +272,7 @@ class PipesLambdaClient(PipesClient, TreatAsResourceParam):
    """A pipes client for invoking AWS lambda.

    By default context is injected via the lambda input event and messages are parsed out of the
    4k tail of logs. S3
    4k tail of logs.

    Args:
        client (boto3.client): The boto lambda client used to call invoke.
@@ -352,6 +369,10 @@ class PipesGlueClient(PipesClient, TreatAsResourceParam):
            context into the Glue job, for example, :py:class:`PipesGlueContextInjector`.
        message_reader (Optional[PipesMessageReader]): A message reader to use to read messages
            from the glue job run. Defaults to :py:class:`PipesGlueLogsMessageReader`.
            When provided with :py:class:`PipesCloudWatchMessageReader`,
            it will be used to receive logs and events from the `.../output/<job-run-id>`
            CloudWatch log stream created by AWS Glue. Note that AWS Glue routes both
            `stderr` and `stdout` from the main job process into this LogStream.
        client (Optional[boto3.client]): The boto Glue client used to launch the Glue job
    """

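    # Illustrative sketch (not part of this diff): explicitly configuring the client to read
    # messages from the Glue CloudWatch output stream. The boto3 clients shown are assumptions.
    #
    #   glue_client = PipesGlueClient(
    #       client=boto3.client("glue"),
    #       message_reader=PipesCloudWatchMessageReader(client=boto3.client("logs")),
    #   )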
@@ -468,8 +489,8 @@ def run(

        if isinstance(self._message_reader, PipesCloudWatchMessageReader):
            # TODO: consume messages in real-time via a background thread
            # so we don't have to wait for the job run to complete
            # before receiving any logs
            self._message_reader.consume_cloudwatch_logs(
                f"{log_group}/output", run_id, start_time=int(start_timestamp)
            )
