Skip to content

Commit

Permalink
Revert "feat: use PlugCaisson instead of custom CompressedReaderBody (#1900)"

Browse files — Browse the repository at this point in the history

This reverts commit 0b57d4b.
  • Loading branch information
Ziinc authored Dec 29, 2023
1 parent 0b57d4b commit 1379248
Show file tree
Hide file tree
Showing 5 changed files with 125 additions and 3 deletions.
68 changes: 68 additions & 0 deletions lib/logflare_web/controllers/plugs/compressed_body_reader.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
defmodule LogflareWeb.Plugs.CompressedBodyReader do
  @moduledoc """
  A `Plug.Parsers` `:body_reader` that transparently decompresses request
  bodies sent with `content-encoding: gzip` or `content-encoding: deflate`.

  Gzip chunking is manually handled using inspiration from
  [sneako/plug_compressed_body_reader](https://github.com/sneako/plug_compressed_body_reader/blob/main/lib/plug_compressed_body_reader/gzip.ex)

  Decompression happens in bounded chunks (at most `@max_chunk_count` calls to
  `:zlib.safeInflate/2`) so a malicious, highly-compressed payload ("zip bomb")
  cannot expand without limit; exceeding the bound raises a `RuntimeError`.
  """

  @doc """
  Reads the request body via `Plug.Conn.read_body/2` and decompresses it
  according to the request's `content-encoding` header.

  Returns `{:ok, body, conn}` when the full (decompressed) body was read,
  `{:more, partial_body, conn}` when more data remains, or `{:error, reason}`
  on failure — the shapes `Plug.Parsers` expects from a `:body_reader`.
  """
  def read_body(conn, opts \\ []) do
    content_encoding = Plug.Conn.get_req_header(conn, "content-encoding")

    with {:ok, body, conn} <- Plug.Conn.read_body(conn, opts) do
      case try_decompress(body, content_encoding) do
        {:ok, data} -> {:ok, data, conn}
        {:more, data} -> {:more, data, conn}
        {:error, _} = error -> error
      end
    end
  end

  defp try_decompress(data, []), do: {:ok, data}
  defp try_decompress(data, ["gzip"]), do: gunzip(data)
  defp try_decompress(data, ["deflate"]), do: inflate(data)
  # Any other encoding (e.g. "br", "zstd", or stacked encodings) is not
  # supported: report an error instead of crashing with a FunctionClauseError.
  defp try_decompress(_data, _content_encoding), do: {:error, :unsupported_encoding}

  @max_wbits 15
  @max_chunk_count 25

  # `+ 16` selects gzip-header decoding in zlib; bare @max_wbits is zlib/deflate.
  defp gunzip(data), do: safe_gunzip(data, @max_wbits + 16)
  defp inflate(data), do: safe_gunzip(data, @max_wbits)

  # Runs chunked inflation inside try/after so the zlib port is always closed,
  # even when chunked_inflate/4 raises on oversized input.
  defp safe_gunzip(data, window_bits) do
    z = :zlib.open()

    try do
      :zlib.inflateInit(z, window_bits)
      result = chunked_inflate(z, data)
      :zlib.inflateEnd(z)

      result
    after
      :zlib.close(z)
    else
      {:finished, data} -> {:ok, IO.iodata_to_binary(data)}
      {:continue, data} -> {:more, IO.iodata_to_binary(data)}
      {:need_dictionary, _, _} -> {:error, :not_supported}
    end
  end

  # Safety valve: refuse to inflate further once the chunk budget is spent.
  defp chunked_inflate(_res, _z, curr_chunk, _acc) when curr_chunk == @max_chunk_count do
    raise RuntimeError, "max chunks reached"
  end

  defp chunked_inflate({:finished, output}, _z, _curr_chunk, acc) do
    {:finished, Enum.reverse([output | acc])}
  end

  # More output pending: keep pulling chunks out of zlib's internal buffer.
  defp chunked_inflate({:continue, output}, z, curr_chunk, acc) do
    z
    |> :zlib.safeInflate([])
    |> chunked_inflate(z, curr_chunk + 1, [output | acc])
  end

  # initial entry point: feed the compressed binary and start the chunk loop
  defp chunked_inflate(z, data) when is_binary(data) do
    z
    |> :zlib.safeInflate(data)
    |> chunked_inflate(z, 0, [])
  end
end
2 changes: 1 addition & 1 deletion lib/logflare_web/endpoint.ex
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ defmodule LogflareWeb.Endpoint do
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason,
body_reader: {PlugCaisson, :read_body, []}
body_reader: {LogflareWeb.Plugs.CompressedBodyReader, :read_body, []}
)

plug(Plug.MethodOverride)
Expand Down
1 change: 0 additions & 1 deletion mix.exs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ defmodule Logflare.Mixfile do
{:bandit, ">= 0.7.7"},
{:plug_crypto, "~> 1.2.2"},
{:cors_plug, "~> 2.0"},
{:plug_caisson, "~> 0.1"},

# Oauth
{:ueberauth_google, "~> 0.8"},
Expand Down
1 change: 0 additions & 1 deletion mix.lock
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@
"phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"},
"phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"},
"plug": {:hex, :plug, "1.15.2", "94cf1fa375526f30ff8770837cb804798e0045fd97185f0bb9e5fcd858c792a3", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "02731fa0c2dcb03d8d21a1d941bdbbe99c2946c0db098eee31008e04c6283615"},
"plug_caisson": {:hex, :plug_caisson, "0.1.0", "dcc6dd3fe2ace44da57af0f9c1cf5995d7525abd794e2c22d2fd612b02a57ada", [:mix], [{:brotli, "~> 0.3.2", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "f8ca939dda4c78f1164ebe23e051d2618eaa8124bbbeb55c424b7c965c757569"},
"plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"},
"poison": {:hex, :poison, "5.0.0", "d2b54589ab4157bbb82ec2050757779bfed724463a544b6e20d79855a9e43b24", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "11dc6117c501b80c62a7594f941d043982a1bd05a1184280c0d9166eb4d8d3fc"},
"postgrex": {:hex, :postgrex, "0.17.3", "c92cda8de2033a7585dae8c61b1d420a1a1322421df84da9a82a6764580c503d", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "946cf46935a4fdca7a81448be76ba3503cff082df42c6ec1ff16a4bdfbfb098d"},
Expand Down
56 changes: 56 additions & 0 deletions test/logflare_web/plugs/compressed_body_reader_test.exs
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
defmodule LogflareWeb.Plugs.CompressedBodyReaderTest do
  use ExUnit.Case, async: true
  use ExUnitProperties

  @subject LogflareWeb.Plugs.CompressedBodyReader

  doctest @subject

  # Builds a test POST conn with the given raw body and request headers.
  def conn(body, headers \\ []) do
    conn = Plug.Test.conn("POST", "/example", body)

    Enum.reduce(headers, conn, fn {key, value}, conn ->
      Plug.Conn.put_req_header(conn, key, value)
    end)
  end

  property "with no `content-encoding` header data is passed through as is" do
    check all(data <- gen_payloads()) do
      assert {:ok, read, _} = @subject.read_body(conn(data))
      assert read == data
    end
  end

  property "with `content-encoding: gzip` header data is decompressed" do
    check all(data <- gen_payloads()) do
      compressed = :zlib.gzip(data)
      conn = conn(compressed, [{"content-encoding", "gzip"}])

      assert {:ok, read, _} = @subject.read_body(conn)
      assert read == data
    end
  end

  property "with `content-encoding: deflate` header data is decompressed" do
    check all(data <- gen_payloads()) do
      # :zlib.compress/1 emits zlib-wrapped deflate, matching the reader's
      # inflateInit window bits (15, no gzip offset).
      compressed = :zlib.compress(data)
      conn = conn(compressed, [{"content-encoding", "deflate"}])

      assert {:ok, read, _} = @subject.read_body(conn)
      assert read == data
    end
  end

  property "gzipped data with overly large size raises RuntimeError" do
    check all(data <- gen_max_chunk_payloads()) do
      compressed = :zlib.gzip(data)
      conn = conn(compressed, [{"content-encoding", "gzip"}])

      assert_raise RuntimeError, "max chunks reached", fn ->
        @subject.read_body(conn)
      end
    end
  end

  # Small payloads (up to ~50 KB) exercising the normal decompression path.
  defp gen_payloads do
    gen all(res <- scale(binary(), &(&1 * 500))) do
      res
    end
  end

  # Payloads large enough (400-500 KB) to exceed the reader's chunk budget.
  defp gen_max_chunk_payloads do
    gen all(res <- binary(max_length: 500_000, min_length: 400_000)) do
      res
    end
  end
end

0 comments on commit 1379248

Please sign in to comment.