diff --git a/snooty.toml b/snooty.toml
index 06d14a3a..0cc84953 100644
--- a/snooty.toml
+++ b/snooty.toml
@@ -16,6 +16,7 @@ toc_landing_pages = [
]
[constants]
+atlas-sp = "Atlas Stream Processing"
connector = "MongoDB Kafka Connector"
connector-short = "Kafka Connector"
connector-long = "MongoDB Connector for Apache Kafka"
@@ -25,6 +26,8 @@ kafka-connect-long = "Confluent Kafka Connect"
avro-long = "Apache Avro"
avro = "Avro"
avro-converter = "Kafka Connect Avro Converter (Avro Converter)"
+aws = ":abbr:`AWS (Amazon Web Services)`"
+azure = "Microsoft Azure"
protobuf-converter = "Kafka Connect Protobuf Converter"
json-schema-converter = "Kafka Connect JSON Schema Converter"
connector_version = "1.14"
@@ -34,6 +37,7 @@ connector_version_github_tag = "master"
connector_kafka_version_major = "2"
connector_kafka_version_minor = "6"
connector_kafka_version_docs = "https://kafka.apache.org/{+connector_kafka_version_major+}{+connector_kafka_version_minor+}"
+service = "Atlas"
sink-connector = "MongoDB Kafka sink connector"
source-connector = "MongoDB Kafka source connector"
sink-connector-title = "MongoDB Kafka Sink Connector"
@@ -59,3 +63,4 @@ jmx-port-mapping = "35000"
sandbox-directory = "kafka-edu/docs-examples/mongodb-kafka-base/"
win-sandbox-directory = "kafka-edu\\docs-examples\\mongodb-kafka-base\\"
cluster = "MongoDB cluster"
+clusters = "MongoDB clusters"
diff --git a/source/includes/atlas-sp.rst b/source/includes/atlas-sp.rst
new file mode 100644
index 00000000..840aba79
--- /dev/null
+++ b/source/includes/atlas-sp.rst
@@ -0,0 +1,9 @@
+.. note::
+
+ {+atlas-sp+} provides MongoDB-native tooling to
+ continuously process streaming data, validate schemas, and
+ materialize views into either {+service+} database collections or Apache
+ Kafka topics.
+
+ To learn more about {+atlas-sp+}, see {+service+}
+ `Stream Processing `__.
diff --git a/source/index.txt b/source/index.txt
index a6a30851..65a97bdd 100644
--- a/source/index.txt
+++ b/source/index.txt
@@ -18,6 +18,7 @@ MongoDB Kafka Connector
Security and Authentication
Monitoring
Migrate from the Community Connector
+ Compare Kafka Connector and Atlas Stream Processing
Troubleshooting
How to Contribute
Issues & Help
@@ -47,19 +48,7 @@ offerings to host your {+kafka+} cluster and {+connector+}:
- To learn more about the MongoDB Source Connector, read the `documentation `__.
- To learn more about the MongoDB Sink Connector, read the `documentation `__.
-.. note::
-
- You can also use Atlas Stream Processing, which is a MongoDB-native way to
- process streaming data by using the MongoDB Query API. It transforms the way
- that developers build modern applications.
-
- Use Atlas Stream Processing to continuously process streaming data,
- validate schemas, and materialize views into either Atlas database
- collections or Apache Kafka topics.
-
- To learn more about Atlas Stream Processing, see the
- `Atlas Stream Processing `__
- product page or read the `docs `__.
+.. include:: /includes/atlas-sp.rst
What's New
----------
diff --git a/source/introduction.txt b/source/introduction.txt
index f7aba52d..9f6cb7a1 100644
--- a/source/introduction.txt
+++ b/source/introduction.txt
@@ -20,3 +20,5 @@ Read the following sections to learn about the {+connector+}, {+kafka-connect+},
- :doc:`Connect to MongoDB `
- :doc:`Data Formats `
- :doc:`Converters `
+
+.. include:: /includes/atlas-sp.rst
diff --git a/source/kafka-connector-atlas-stream-processing-comparison.txt b/source/kafka-connector-atlas-stream-processing-comparison.txt
new file mode 100644
index 00000000..60c79a17
--- /dev/null
+++ b/source/kafka-connector-atlas-stream-processing-comparison.txt
@@ -0,0 +1,83 @@
+.. _kafka-connector-atlas-stream-processing-comparison:
+
+===================================================
+Compare {+connector-short+} and {+atlas-sp+}
+===================================================
+
+.. toctree::
+ :titlesonly:
+ :maxdepth: 2
+
+.. contents:: On this page
+ :local:
+ :backlinks: none
+ :depth: 2
+ :class: singlecol
+
+This section provides a comparison of the feature sets of the MongoDB
+{+connector-short+} and {+atlas-sp+} to help you identify which tool
+best suits your use case.
+
+.. list-table::
+ :header-rows: 1
+ :widths: 20 40 40
+
+ * - Feature
+ - {+connector-short+}
+ - {+atlas-sp+}
+
+ * - Supported Stream Processing Capabilities
+  - To process streaming data handled by the {+connector-short+}, you must
+ do one of the following:
+    - Extend the {+connector-short+} with Single Message Transforms (SMTs) or custom Java
+ - Use or write external tooling
+ - Available through the MongoDB aggregation framework, with
+ extensions specific to stream processing.
+
+ * - Installation
+  - Installation required, either locally or on Confluent.
+ - No installation required.
+
+ * - Connectivity Tooling
+ - MongoDB Java Driver required.
+ - Connection managed by {+service+}.
+
+ * - Hosting
+ - Hosting required for your {+kafka+} cluster and the Kafka
+    Connector. Use partner services, such as Confluent Cloud, Amazon
+    Managed Streaming for Apache Kafka (MSK), or Redpanda Cloud when possible.
+ - Stream processing functionality fully managed by {+service+}.
+ Hosting required for your {+kafka+} cluster.
+
+ * - Windowing
+ - No support for windowing. You must manually configure windowing
+ with the Kafka Streams API or other external tooling.
+ - Support for highly configurable
+ :atlas:`windows `.
+
+ * - Connection Type Support
+ - - Kafka clusters
+ - {+service+} databases
+ - {+service+} collections
+ - - Kafka clusters
+ - {+service+} {+clusters+}
+ - {+service+} databases
+ - {+service+} collections
+ - HTTPS
+
+ * - Security Features
+ - - SSL/TLS
+ - X.509
+ - {+aws+} IAM
+ - User must develop all other authentication tools
+ - - SSL/TLS
+ - X.509
+ - VPC Peering with {+aws+}
+ - Private Link with {+aws+} Confluent
+ - Private Link with {+aws+} MSK
+ - Private Link with {+azure+} Event Hub
+
+ * - Pricing
+ - Pricing dependent on your hosting provider.
+  - Hourly pricing managed by {+atlas-sp+}. Typical costs are
+ approximately 25% of the cost of the {+connector-short+}.
diff --git a/source/migrate-from-kafka-connect-mongodb.txt b/source/migrate-from-kafka-connect-mongodb.txt
index a29e2c3a..cac27a9c 100644
--- a/source/migrate-from-kafka-connect-mongodb.txt
+++ b/source/migrate-from-kafka-connect-mongodb.txt
@@ -12,6 +12,8 @@ The following sections list the changes you must make to your Kafka
Connect sink connector configuration settings and custom classes to transition
to the {+sink-connector+}.
+.. include:: /includes/atlas-sp.rst
+
Update Configuration Settings
-----------------------------
diff --git a/source/quick-start.txt b/source/quick-start.txt
index 74a8edd0..1b43f6c1 100644
--- a/source/quick-start.txt
+++ b/source/quick-start.txt
@@ -17,6 +17,8 @@
.. meta::
:keywords: get started, tutorial, code example
+.. include:: /includes/atlas-sp.rst
+
Overview
--------
diff --git a/source/sink-connector.txt b/source/sink-connector.txt
index f9f2456c..1b58cd95 100644
--- a/source/sink-connector.txt
+++ b/source/sink-connector.txt
@@ -17,6 +17,8 @@ Sink Connector
:depth: 2
:class: singlecol
+.. include:: /includes/atlas-sp.rst
+
Overview
--------
diff --git a/source/source-connector.txt b/source/source-connector.txt
index 4ae04e0e..f67bbae9 100644
--- a/source/source-connector.txt
+++ b/source/source-connector.txt
@@ -18,6 +18,8 @@ Source Connector
:depth: 2
:class: singlecol
+.. include:: /includes/atlas-sp.rst
+
Overview
--------