diff --git a/DESCRIPTION b/DESCRIPTION
index 17cebde1..ba8ed61e 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -71,6 +71,7 @@ Collate:
     'provider-claude.R'
     'provider-cortex.R'
     'provider-databricks.R'
+    'provider-deepseek.R'
     'provider-gemini.R'
     'provider-github.R'
     'provider-groq.R'
diff --git a/NAMESPACE b/NAMESPACE
index 721c77a0..80e27533 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -19,6 +19,7 @@ export(chat_bedrock)
 export(chat_claude)
 export(chat_cortex)
 export(chat_databricks)
+export(chat_deepseek)
 export(chat_gemini)
 export(chat_github)
 export(chat_groq)
diff --git a/NEWS.md b/NEWS.md
index eebabc00..55f491c1 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,5 +1,7 @@
 # ellmer (development version)
 
+* `chat_deepseek()` provides support for DeepSeek models (#242).
+
 * `print(Chat)` no longer wraps long lines, making it easier to read code and bulleted lists (#246).
 
 * `chat_openai()` should be less likely to timeout when not streaming chat results (#213).
diff --git a/R/provider-deepseek.R b/R/provider-deepseek.R
new file mode 100644
index 00000000..a2b65dcf
--- /dev/null
+++ b/R/provider-deepseek.R
@@ -0,0 +1,88 @@
+#' @include provider-openai.R
+NULL
+
+#' Chat with a model hosted on DeepSeek
+#'
+#' @description
+#' Sign up at <https://platform.deepseek.com>.
+#'
+#' This function is a lightweight wrapper around [chat_openai()] with
+#' default settings tailored for DeepSeek.
+#'
+#' ## Known limitations
+#'
+#' * Structured data extraction is not supported.
+#' * Function calling is currently [unstable](https://api-docs.deepseek.com/guides/function_calling).
+#' * Images are not supported.
+#'
+#' @export
+#' @family chatbots
+#' @inheritParams chat_openai
+#' @param api_key The API key to use for authentication. You generally should
+#'   not supply this directly, but instead set the `DEEPSEEK_API_KEY`
+#'   environment variable.
+#' @inherit chat_openai return
+#' @examples
+#' \dontrun{
+#' chat <- chat_deepseek()
+#' chat$chat("Tell me three jokes about statisticians")
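+#'
+#' # Supplying a system prompt (mirrors the usage in the test suite)
+#' chat <- chat_deepseek(system_prompt = "Be as terse as possible; no punctuation")
+#' chat$chat("What is 1 + 1?")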
+#' }
+chat_deepseek <- function(system_prompt = NULL,
+                          turns = NULL,
+                          api_key = deepseek_key(),
+                          model = NULL,
+                          seed = NULL,
+                          api_args = list(),
+                          echo = NULL) {
+
+  turns <- normalize_turns(turns, system_prompt)
+  model <- set_default(model, "deepseek-chat")
+  echo <- check_echo(echo)
+
+  if (is_testing() && is.null(seed)) {
+    seed <- 1014
+  }
+
+  provider <- ProviderDeepSeek(
+    base_url = "https://api.deepseek.com",
+    model = model,
+    seed = seed,
+    extra_args = api_args,
+    api_key = api_key
+  )
+  Chat$new(provider = provider, turns = turns, echo = echo)
+}
+
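+# DeepSeek's chat API is largely OpenAI-compatible, so the provider inherits
+# from ProviderOpenAI and only overrides how content and turns are serialized.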
+ProviderDeepSeek <- new_class("ProviderDeepSeek", parent = ProviderOpenAI)
+
+method(as_json, list(ProviderDeepSeek, ContentText)) <- function(provider, x) {
+  x@text
+}
+
+method(as_json, list(ProviderDeepSeek, Turn)) <- function(provider, x) {
+  if (x@role == "user") {
+    # Text and tool results go in separate messages
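+    # (e.g. a turn with text plus one tool result becomes two messages: a
+    # "user" message with the text and a "tool" message with the result)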
+    texts <- keep(x@contents, S7_inherits, ContentText)
+    texts_out <- lapply(texts, function(text) {
+      list(role = "user", content = as_json(provider, text))
+    })
+
+    tools <- keep(x@contents, S7_inherits, ContentToolResult)
+    tools_out <- lapply(tools, function(tool) {
+      list(role = "tool", content = tool_string(tool), tool_call_id = tool@id)
+    })
+
+    c(texts_out, tools_out)
+  } else if (x@role == "assistant") {
+    # Tool requests come out of the contents and go into their own
+    # `tool_calls` field
+    text <- detect(x@contents, S7_inherits, ContentText)
+    tools <- keep(x@contents, S7_inherits, ContentToolRequest)
+
+    list(compact(list(
+      role = "assistant",
+      content = as_json(provider, text),
+      tool_calls = as_json(provider, tools)
+    )))
+  } else {
+    as_json(super(provider, ProviderOpenAI), x)
+  }
+}
+
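+# Looks up the API key from the DEEPSEEK_API_KEY environment variable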
+deepseek_key <- function() key_get("DEEPSEEK_API_KEY")
diff --git a/R/provider-openai.R b/R/provider-openai.R
index a50c662a..3abfd005 100644
--- a/R/provider-openai.R
+++ b/R/provider-openai.R
@@ -116,6 +116,8 @@ method(chat_request, ProviderOpenAI) <- function(provider,
   req <- req_error(req, body = function(resp) {
     if (resp_content_type(resp) == "application/json") {
       resp_body_json(resp)$error$message
+    } else if (resp_content_type(resp) == "text/plain") {
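+      # fall back to the raw body for plain-text error responses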
+      resp_body_string(resp)
     }
   })
 
diff --git a/README.Rmd b/README.Rmd
index 58bfb12f..0695746e 100644
--- a/README.Rmd
+++ b/README.Rmd
@@ -41,6 +41,7 @@ ellmer supports a wide variety of model providers:
 * AWS Bedrock: `chat_bedrock()`.
 * Azure OpenAI: `chat_azure()`.
 * Databricks: `chat_databricks()`.
+* DeepSeek: `chat_deepseek()`.
 * GitHub model marketplace: `chat_github()`.
 * Google Gemini: `chat_gemini()`.
 * Groq: `chat_groq()`.
diff --git a/README.md b/README.md
index f620529b..bc3f2bf2 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,7 @@ ellmer supports a wide variety of model providers:
 - AWS Bedrock: `chat_bedrock()`.
 - Azure OpenAI: `chat_azure()`.
 - Databricks: `chat_databricks()`.
+- DeepSeek: `chat_deepseek()`.
 - GitHub model marketplace: `chat_github()`.
 - Google Gemini: `chat_gemini()`.
 - Groq: `chat_groq()`.
diff --git a/man/chat_bedrock.Rd b/man/chat_bedrock.Rd
index b47da0f1..d8d0fb20 100644
--- a/man/chat_bedrock.Rd
+++ b/man/chat_bedrock.Rd
@@ -59,6 +59,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_claude.Rd b/man/chat_claude.Rd
index 2bc6bc96..4e0a9424 100644
--- a/man/chat_claude.Rd
+++ b/man/chat_claude.Rd
@@ -73,6 +73,7 @@ Other chatbots:
 \code{\link{chat_bedrock}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_cortex.Rd b/man/chat_cortex.Rd
index 23bf78ca..5e3dc4e2 100644
--- a/man/chat_cortex.Rd
+++ b/man/chat_cortex.Rd
@@ -82,6 +82,7 @@ Other chatbots:
 \code{\link{chat_bedrock}()},
 \code{\link{chat_claude}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_databricks.Rd b/man/chat_databricks.Rd
index eba75e41..f3d87fb9 100644
--- a/man/chat_databricks.Rd
+++ b/man/chat_databricks.Rd
@@ -87,6 +87,7 @@ Other chatbots:
 \code{\link{chat_bedrock}()},
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_deepseek.Rd b/man/chat_deepseek.Rd
new file mode 100644
index 00000000..d8552c1a
--- /dev/null
+++ b/man/chat_deepseek.Rd
@@ -0,0 +1,83 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/provider-deepseek.R
+\name{chat_deepseek}
+\alias{chat_deepseek}
+\title{Chat with a model hosted on DeepSeek}
+\usage{
+chat_deepseek(
+  system_prompt = NULL,
+  turns = NULL,
+  api_key = deepseek_key(),
+  model = NULL,
+  seed = NULL,
+  api_args = list(),
+  echo = NULL
+)
+}
+\arguments{
+\item{system_prompt}{A system prompt to set the behavior of the assistant.}
+
+\item{turns}{A list of \link{Turn}s to start the chat with (i.e., continuing a
+previous conversation). If not provided, the conversation begins from
+scratch.}
+
+\item{api_key}{The API key to use for authentication. You generally should
+not supply this directly, but instead set the \code{DEEPSEEK_API_KEY} environment
+variable.}
+
+\item{model}{The model to use for the chat. The default, \code{NULL}, will pick
+a reasonable default, and tell you about it. We strongly recommend explicitly
+choosing a model for all but the most casual use.}
+
+\item{seed}{Optional integer seed that the model uses to try and make output
+more reproducible.}
+
+\item{api_args}{Named list of arbitrary extra arguments appended to the body
+of every chat API call.}
+
+\item{echo}{One of the following options:
+\itemize{
+\item \code{none}: don't emit any output (default when running in a function).
+\item \code{text}: echo text output as it streams in (default when running at
+the console).
+\item \code{all}: echo all input and output.
+}
+
+Note this only affects the \code{chat()} method.}
+}
+\value{
+A \link{Chat} object.
+}
+\description{
+Sign up at \url{https://platform.deepseek.com}.
+
+This function is a lightweight wrapper around \code{\link[=chat_openai]{chat_openai()}} with
+default settings tailored for DeepSeek.
+\subsection{Known limitations}{
+\itemize{
+\item Structured data extraction is not supported.
+\item Function calling is currently \href{https://api-docs.deepseek.com/guides/function_calling}{unstable}.
+\item Images are not supported.
+}
+}
+}
+\examples{
+\dontrun{
+chat <- chat_deepseek()
+chat$chat("Tell me three jokes about statisticians")
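+
+# Supplying a system prompt (mirrors the usage in the test suite)
+chat <- chat_deepseek(system_prompt = "Be as terse as possible; no punctuation")
+chat$chat("What is 1 + 1?")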
+}
+}
+\seealso{
+Other chatbots: 
+\code{\link{chat_bedrock}()},
+\code{\link{chat_claude}()},
+\code{\link{chat_cortex}()},
+\code{\link{chat_databricks}()},
+\code{\link{chat_gemini}()},
+\code{\link{chat_github}()},
+\code{\link{chat_groq}()},
+\code{\link{chat_ollama}()},
+\code{\link{chat_openai}()},
+\code{\link{chat_perplexity}()}
+}
+\concept{chatbots}
diff --git a/man/chat_gemini.Rd b/man/chat_gemini.Rd
index 612eb916..a0bee100 100644
--- a/man/chat_gemini.Rd
+++ b/man/chat_gemini.Rd
@@ -65,6 +65,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
 \code{\link{chat_ollama}()},
diff --git a/man/chat_github.Rd b/man/chat_github.Rd
index cb219f4f..7adcc615 100644
--- a/man/chat_github.Rd
+++ b/man/chat_github.Rd
@@ -74,6 +74,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_groq}()},
 \code{\link{chat_ollama}()},
diff --git a/man/chat_groq.Rd b/man/chat_groq.Rd
index 881d7012..101b5781 100644
--- a/man/chat_groq.Rd
+++ b/man/chat_groq.Rd
@@ -71,6 +71,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_ollama}()},
diff --git a/man/chat_ollama.Rd b/man/chat_ollama.Rd
index cd74a75c..0711b393 100644
--- a/man/chat_ollama.Rd
+++ b/man/chat_ollama.Rd
@@ -76,6 +76,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_openai.Rd b/man/chat_openai.Rd
index 437c3bfa..eca09e0c 100644
--- a/man/chat_openai.Rd
+++ b/man/chat_openai.Rd
@@ -80,6 +80,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/man/chat_perplexity.Rd b/man/chat_perplexity.Rd
index 49dfcd7e..37b1e0b0 100644
--- a/man/chat_perplexity.Rd
+++ b/man/chat_perplexity.Rd
@@ -74,6 +74,7 @@ Other chatbots:
 \code{\link{chat_claude}()},
 \code{\link{chat_cortex}()},
 \code{\link{chat_databricks}()},
+\code{\link{chat_deepseek}()},
 \code{\link{chat_gemini}()},
 \code{\link{chat_github}()},
 \code{\link{chat_groq}()},
diff --git a/tests/testthat/_snaps/provider-deepseek.md b/tests/testthat/_snaps/provider-deepseek.md
new file mode 100644
index 00000000..295ed087
--- /dev/null
+++ b/tests/testthat/_snaps/provider-deepseek.md
@@ -0,0 +1,7 @@
+# defaults are reported
+
+    Code
+      . <- chat_deepseek()
+    Message
+      Using model = "deepseek-chat".
+
diff --git a/tests/testthat/test-provider-deepseek.R b/tests/testthat/test-provider-deepseek.R
new file mode 100644
index 00000000..d367ccf0
--- /dev/null
+++ b/tests/testthat/test-provider-deepseek.R
@@ -0,0 +1,52 @@
+# Getting started --------------------------------------------------------
+
+test_that("can make simple request", {
+  chat <- chat_deepseek("Be as terse as possible; no punctuation")
+  resp <- chat$chat("What is 1 + 1?", echo = FALSE)
+  expect_match(resp, "2")
+  expect_equal(chat$last_turn()@tokens, c(20, 1))
+})
+
+test_that("can make simple streaming request", {
+  chat <- chat_deepseek("Be as terse as possible; no punctuation")
+  resp <- coro::collect(chat$stream("What is 1 + 1?"))
+  expect_match(paste0(unlist(resp), collapse = ""), "2")
+})
+
+# Common provider interface -----------------------------------------------
+
+test_that("defaults are reported", {
+  expect_snapshot(. <- chat_deepseek())
+})
+
+test_that("respects turns interface", {
+  chat_fun <- chat_deepseek
+
+  test_turns_system(chat_fun)
+  test_turns_existing(chat_fun)
+})
+
+# Only partially works
+# test_that("all tool variations work", {
+#   chat_fun <- chat_deepseek
+
+#   test_tools_simple(chat_fun)
+#   test_tools_async(chat_fun)
+#   test_tools_parallel(chat_fun)
+#   test_tools_sequential(chat_fun, total_calls = 6)
+# })
+
+# Doesn't support data extraction
+# test_that("can extract data", {
+#   chat_fun <- chat_deepseek
+
+#   test_data_extraction(chat_fun)
+# })
+
+# Doesn't support images
+# test_that("can use images", {
+#   chat_fun <- chat_deepseek
+
+#   test_images_inline(chat_fun)
+#   test_images_remote(chat_fun)
+# })