-
Notifications
You must be signed in to change notification settings - Fork 49
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Revert "feat: use PlugCaisson instead of custom CompressedReaderBody (#…
- Loading branch information
Showing
5 changed files
with
125 additions
and
3 deletions.
There are no files selected for viewing
68 changes: 68 additions & 0 deletions
68
lib/logflare_web/controllers/plugs/compressed_body_reader.ex
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
defmodule LogflareWeb.Plugs.CompressedBodyReader do
  @moduledoc """
  A body reader for `Plug.Parsers` that transparently decompresses request bodies.

  Supported `content-encoding` values are `gzip` and `deflate`; a request with
  no `content-encoding` header is passed through unchanged. Decompression is
  performed in bounded chunks via `:zlib.safeInflate/2` and capped at
  `@max_chunk_count` chunks, so a small compressed payload cannot expand into
  an unbounded amount of memory (zip-bomb protection).

  Gzip chunking is manually handled using inspiration from
  [sneako/plug_compressed_body_reader](https://github.com/sneako/plug_compressed_body_reader/blob/main/lib/plug_compressed_body_reader/gzip.ex)
  """

  @doc """
  Reads the request body from `conn`, decompressing it when needed.

  Mirrors the `Plug.Conn.read_body/2` contract: returns `{:ok, body, conn}`,
  `{:more, partial_body, conn}` or `{:error, reason}`.

  NOTE(review): a `content-encoding` other than `gzip`/`deflate` (or multiple
  encoding header values) currently raises `FunctionClauseError` rather than
  returning an error tuple — confirm this is the intended behavior upstream.
  """
  def read_body(conn, opts \\ []) do
    content_encoding = Plug.Conn.get_req_header(conn, "content-encoding")

    with {:ok, body, conn} <- Plug.Conn.read_body(conn, opts) do
      case try_decompress(body, content_encoding) do
        {:ok, data} -> {:ok, data, conn}
        {:more, data} -> {:more, data, conn}
        {:error, _} = error -> error
      end
    end
  end

  defp try_decompress(data, []), do: {:ok, data}
  defp try_decompress(data, ["gzip"]), do: gunzip(data)
  defp try_decompress(data, ["deflate"]), do: inflate(data)

  # 15 is zlib's maximum window size; adding 16 selects gzip header decoding.
  @max_wbits 15
  # Upper bound on safeInflate iterations — limits how far one body can expand.
  @max_chunk_count 25

  defp gunzip(data), do: safe_gunzip(data, @max_wbits + 16)
  defp inflate(data), do: safe_gunzip(data, @max_wbits)

  # Runs chunked inflation inside try/after so the zlib port is always closed,
  # then maps the final safeInflate tag onto this module's return contract.
  defp safe_gunzip(data, window_bits) do
    z = :zlib.open()

    try do
      :zlib.inflateInit(z, window_bits)
      result = chunked_inflate(z, data)
      :zlib.inflateEnd(z)

      result
    after
      :zlib.close(z)
    else
      {:finished, data} -> {:ok, IO.iodata_to_binary(data)}
      {:continue, data} -> {:more, IO.iodata_to_binary(data)}
      {:need_dictionary, _, _} -> {:error, :not_supported}
    end
  end

  # Hard stop once the chunk budget is exhausted (decompression-bomb guard).
  defp chunked_inflate(_res, _z, curr_chunk, _acc) when curr_chunk == @max_chunk_count do
    raise RuntimeError, "max chunks reached"
  end

  defp chunked_inflate({:finished, output}, _z, _curr_chunk, acc) do
    {:finished, Enum.reverse([output | acc])}
  end

  # Streams requiring a preset dictionary are not supported; pass the tuple
  # through so `safe_gunzip/2` turns it into `{:error, :not_supported}`.
  # Fix: without this clause, a `:need_dictionary` result from
  # `:zlib.safeInflate/2` raised FunctionClauseError and the matching `else`
  # clause in `safe_gunzip/2` was unreachable dead code.
  defp chunked_inflate({:need_dictionary, _adler, _output} = res, _z, _curr_chunk, _acc) do
    res
  end

  defp chunked_inflate({:continue, output}, z, curr_chunk, acc) do
    z
    |> :zlib.safeInflate([])
    |> chunked_inflate(z, curr_chunk + 1, [output | acc])
  end

  # Initial entry point: feed the raw body into zlib and start chunking.
  defp chunked_inflate(z, data) when is_binary(data) do
    z
    |> :zlib.safeInflate(data)
    |> chunked_inflate(z, 0, [])
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
defmodule LogflareWeb.Plugs.CompressedBodyReaderTest do
  use ExUnit.Case, async: true
  use ExUnitProperties

  @subject LogflareWeb.Plugs.CompressedBodyReader

  doctest @subject

  # Builds a test POST connection carrying `body`, with optional request
  # headers given as `{name, value}` tuples.
  def conn(body, headers \\ []) do
    conn = Plug.Test.conn("POST", "/example", body)

    Enum.reduce(headers, conn, fn {key, value}, conn ->
      Plug.Conn.put_req_header(conn, key, value)
    end)
  end

  property "with no `content-encoding` header data is passed through as is" do
    check all(data <- gen_payloads()) do
      assert {:ok, read, _} = @subject.read_body(conn(data))
      assert read == data
    end
  end

  # Fix: description previously said "passed through as is" (copy-paste from
  # the property above), but this property asserts the body is decompressed.
  property "with `content-encoding: gzip` header data is decompressed" do
    check all(data <- gen_payloads()) do
      compressed = :zlib.gzip(data)
      conn = conn(compressed, [{"content-encoding", "gzip"}])

      assert {:ok, read, _} = @subject.read_body(conn)
      assert read == data
    end
  end

  # Added: the reader also supports `deflate` (zlib-wrapped), which was
  # previously untested. `:zlib.compress/1` emits the zlib format that the
  # reader inflates with the plain (non-gzip) window.
  property "with `content-encoding: deflate` header data is decompressed" do
    check all(data <- gen_payloads()) do
      compressed = :zlib.compress(data)
      conn = conn(compressed, [{"content-encoding", "deflate"}])

      assert {:ok, read, _} = @subject.read_body(conn)
      assert read == data
    end
  end

  property "gzipped data with overly large size raises RuntimeError" do
    check all(data <- gen_max_chunk_payloads()) do
      compressed = :zlib.gzip(data)
      conn = conn(compressed, [{"content-encoding", "gzip"}])

      assert_raise RuntimeError, "max chunks reached", fn ->
        @subject.read_body(conn)
      end
    end
  end

  # Binaries up to ~500x the default size — small enough to decompress
  # within the reader's chunk budget.
  defp gen_payloads do
    gen all(res <- scale(binary(), &(&1 * 500))) do
      res
    end
  end

  # Binaries large enough (400–500 kB) to exceed the reader's max chunk
  # count during inflation.
  defp gen_max_chunk_payloads do
    gen all(res <- binary(max_length: 500_000, min_length: 400_000)) do
      res
    end
  end
end