2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -24,6 +24,8 @@ and this project adheres to
- Shared doc lookup in clustered environments now works across nodes instead of
only searching locally
[#3910](https://github.com/OpenFn/lightning/issues/3910)
- Accept gzip-compressed payloads on webhooks
  [#3937](https://github.com/OpenFn/lightning/issues/3937)

## [2.14.14] - 2025-11-05

1 change: 1 addition & 0 deletions lib/lightning_web/plug_configs.ex
@@ -15,6 +15,7 @@ defmodule LightningWeb.PlugConfigs do
        }
      ],
      pass: ["*/*"],
      body_reader: {LightningWeb.Plugs.DecompressBody, :read_body, []},
      json_decoder: Phoenix.json_library()
    ]
  end
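For context on the hook being configured here: `Plug.Parsers` accepts a `:body_reader` MFA and calls it wherever it would otherwise call `Plug.Conn.read_body/2`, prepending the conn and the parser's read options to the configured args. A do-nothing reader that satisfies the contract would look like this (a sketch for illustration, not code from this repo):

```elixir
defmodule IdentityBodyReader do
  # Hypothetical example of the :body_reader contract: the function must
  # return the same shapes as Plug.Conn.read_body/2, i.e. {:ok, body, conn},
  # {:more, partial_body, conn}, or {:error, reason}.
  def read_body(conn, opts) do
    Plug.Conn.read_body(conn, opts)
  end
end
```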
21 changes: 21 additions & 0 deletions lib/lightning_web/plugs/decompress_body.ex
@@ -0,0 +1,21 @@
defmodule LightningWeb.Plugs.DecompressBody do
  @moduledoc """
  A Plug body reader that decompresses gzipped request bodies. Checks for
  the `Content-Encoding: gzip` header and automatically decompresses the
  request body before it's passed to the JSON parser.
  """
  def read_body(conn, opts) do
    case Plug.Conn.get_req_header(conn, "content-encoding") do
      ["gzip" | _] ->
        case Plug.Conn.read_body(conn, opts) do
          {:ok, body, conn} ->
            {:ok, :zlib.gunzip(body), conn}

          other ->
            other
        end

      _ ->
        Plug.Conn.read_body(conn, opts)
    end
  end
end
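One caveat this diff does not address: `:zlib.gunzip/1` raises when the body is not valid gzip (e.g. a client sends `Content-Encoding: gzip` with a plain JSON body), which would surface as a 500 from inside `Plug.Parsers`. A hedged sketch of a more defensive variant, assuming falling back to the raw body is acceptable (hypothetical module, not part of the PR):

```elixir
defmodule DecompressBodySafe do
  # Hypothetical variant, not part of the PR: falls back to the raw body when
  # the payload is not actually gzip, instead of letting :zlib.gunzip/1 raise.
  def read_body(conn, opts) do
    case Plug.Conn.get_req_header(conn, "content-encoding") do
      ["gzip" | _] ->
        case Plug.Conn.read_body(conn, opts) do
          {:ok, body, conn} ->
            # :zlib's :data_error surfaces as an ErlangError in Elixir
            try do
              {:ok, :zlib.gunzip(body), conn}
            rescue
              ErlangError -> {:ok, body, conn}
            end

          other ->
            other
        end

      _ ->
        Plug.Conn.read_body(conn, opts)
    end
  end
end
```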
74 changes: 74 additions & 0 deletions test/lightning_web/plugs/decompress_body_test.exs
@@ -0,0 +1,74 @@
defmodule LightningWeb.Plugs.DecompressBodyTest do
  use LightningWeb.ConnCase, async: true

  alias LightningWeb.Plugs.DecompressBody

  # The test adapter serves the request body from the `req_body` key of its
  # state (`chunks` holds response chunks), so swap the payload in there.
  defp put_req_body(conn, body) do
    {adapter, state} = conn.adapter
    %{conn | adapter: {adapter, %{state | req_body: body}}}
  end

  describe "read_body/2" do
    test "decompresses gzipped request body", %{conn: conn} do
      json_data = %{"traitors" => ["alan"], "faithfuls" => ["david"]}
      json_string = Jason.encode!(json_data)

      # Compress the JSON string
      gzipped_body = :zlib.gzip(json_string)

      # Simulate a connection with a gzipped body
      conn =
        conn
        |> put_req_header("content-encoding", "gzip")
        |> put_req_body(gzipped_body)

      # Call the decompress function
      {:ok, decompressed_body, _conn} = DecompressBody.read_body(conn, [])

      assert decompressed_body == json_string
    end

    test "passes through non-gzipped request body", %{conn: conn} do
      json_data = %{"traitors" => ["alan"], "faithfuls" => ["david"]}
      json_string = Jason.encode!(json_data)

      # Simulate a connection without compression
      conn = put_req_body(conn, json_string)

      # Call the read_body function (should just read normally)
      {:ok, body, _conn} = DecompressBody.read_body(conn, [])

      assert body == json_string
    end

    test "handles multiple content-encoding headers", %{conn: conn} do
      json_string = Jason.encode!(%{"test" => "data"})
      gzipped_body = :zlib.gzip(json_string)

      # Multiple header values with gzip first; put_req_header/3 replaces any
      # existing value, so prepend to actually end up with two entries
      conn =
        conn
        |> put_req_header("content-encoding", "identity")
        |> prepend_req_headers([{"content-encoding", "gzip"}])
        |> put_req_body(gzipped_body)

      {:ok, decompressed_body, _conn} = DecompressBody.read_body(conn, [])

      assert decompressed_body == json_string
    end
  end
end
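For reference, a client exercising the new behaviour gzips the JSON payload and sets both headers. A minimal sketch assuming the Req HTTP client and a placeholder webhook URL (neither is part of this diff):

```elixir
payload = Jason.encode!(%{"traitors" => ["alan"], "faithfuls" => ["david"]})

# Compress client-side; the body reader above transparently gunzips it
# before Plug.Parsers hands the body to the JSON decoder.
Req.post!("https://demo.openfn.org/i/<webhook-id>",
  body: :zlib.gzip(payload),
  headers: [
    {"content-type", "application/json"},
    {"content-encoding", "gzip"}
  ]
)
```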