diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0503226c95..30f47591a1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -24,6 +24,8 @@ and this project adheres to
 - Shared doc lookup in clustered environments now works across nodes instead
   of only searching locally
   [#3910](https://github.com/OpenFn/lightning/issues/3910)
+- Accept gzip-compressed payloads on
+  webhooks [#3937](https://github.com/OpenFn/lightning/issues/3937)
 
 ## [2.14.14] - 2025-11-05
 
diff --git a/lib/lightning_web/plug_configs.ex b/lib/lightning_web/plug_configs.ex
index 0a175a806b..aba376975d 100644
--- a/lib/lightning_web/plug_configs.ex
+++ b/lib/lightning_web/plug_configs.ex
@@ -15,6 +15,7 @@ defmodule LightningWeb.PlugConfigs do
         }
       ],
       pass: ["*/*"],
+      body_reader: {LightningWeb.Plugs.DecompressBody, :read_body, []},
       json_decoder: Phoenix.json_library()
     ]
   end
diff --git a/lib/lightning_web/plugs/decompress_body.ex b/lib/lightning_web/plugs/decompress_body.ex
new file mode 100644
index 0000000000..0be3ab4659
--- /dev/null
+++ b/lib/lightning_web/plugs/decompress_body.ex
@@ -0,0 +1,43 @@
+defmodule LightningWeb.Plugs.DecompressBody do
+  @moduledoc """
+  A `Plug.Parsers` body reader that transparently decompresses gzipped
+  request bodies.
+
+  When the request carries a `Content-Encoding: gzip` header, the body
+  is read and gunzipped before being handed to the JSON parser;
+  otherwise the body is read as-is. A malformed gzip payload raises
+  `Plug.BadRequestError` so the client receives a 400 instead of a 500.
+  """
+
+  @doc """
+  Reads the request body, gunzipping it when the first
+  `content-encoding` value is `gzip`.
+
+  Returns the same shapes as `Plug.Conn.read_body/2`.
+  """
+  def read_body(conn, opts) do
+    case Plug.Conn.get_req_header(conn, "content-encoding") do
+      ["gzip" | _] ->
+        case Plug.Conn.read_body(conn, opts) do
+          {:ok, body, conn} ->
+            {:ok, gunzip!(body), conn}
+
+          # A {:more, partial, conn} chunk cannot be gunzipped safely;
+          # pass it through so Plug.Parsers reports the oversized body.
+          other ->
+            other
+        end
+
+      _ ->
+        Plug.Conn.read_body(conn, opts)
+    end
+  end
+
+  # Gunzips the body, translating :zlib failures (e.g. :data_error on
+  # corrupt input) into a client error rather than a server crash.
+  defp gunzip!(body) do
+    :zlib.gunzip(body)
+  rescue
+    ErlangError -> raise Plug.BadRequestError
+  end
+end
diff --git a/test/lightning_web/plugs/decompress_body_test.exs b/test/lightning_web/plugs/decompress_body_test.exs
new file mode 100644
index 0000000000..bf8f0d6e1f
--- /dev/null
+++ b/test/lightning_web/plugs/decompress_body_test.exs
@@ -0,0 +1,86 @@
+defmodule LightningWeb.Plugs.DecompressBodyTest do
+  use LightningWeb.ConnCase, async: true
+
+  alias LightningWeb.Plugs.DecompressBody
+
+  describe "read_body/2" do
+    test "decompresses gzipped request body", %{conn: conn} do
+      json_data = %{"traitors" => ["alan"], "faithfuls" => ["david"]}
+      json_string = Jason.encode!(json_data)
+
+      # Compress the JSON string
+      gzipped_body = :zlib.gzip(json_string)
+
+      # Simulate a connection with a gzipped body
+      conn =
+        conn
+        |> put_req_header("content-encoding", "gzip")
+        |> Map.put(:body_params, %{})
+
+      # Stub the adapter so read_body/2 returns the gzipped bytes
+      conn = %{
+        conn
+        | adapter: {Plug.Adapters.Test.Conn, %{chunks: [gzipped_body]}}
+      }
+
+      {:ok, decompressed_body, _conn} = DecompressBody.read_body(conn, [])
+
+      assert decompressed_body == json_string
+    end
+
+    test "passes through non-gzipped request body", %{conn: conn} do
+      json_data = %{"traitors" => ["alan"], "faithfuls" => ["david"]}
+      json_string = Jason.encode!(json_data)
+
+      # No content-encoding header, so the body must be read untouched
+      conn = Map.put(conn, :body_params, %{})
+
+      conn = %{
+        conn
+        | adapter: {Plug.Adapters.Test.Conn, %{chunks: [json_string]}}
+      }
+
+      {:ok, body, _conn} = DecompressBody.read_body(conn, [])
+
+      assert body == json_string
+    end
+
+    test "handles multiple content-encoding headers", %{conn: conn} do
+      json_string = Jason.encode!(%{"test" => "data"})
+      gzipped_body = :zlib.gzip(json_string)
+
+      # Two values for the same header, gzip first. put_req_header/3
+      # replaces any existing value, so prepend the gzip entry instead.
+      conn =
+        conn
+        |> put_req_header("content-encoding", "identity")
+        |> prepend_req_headers([{"content-encoding", "gzip"}])
+        |> Map.put(:body_params, %{})
+
+      conn = %{
+        conn
+        | adapter: {Plug.Adapters.Test.Conn, %{chunks: [gzipped_body]}}
+      }
+
+      {:ok, decompressed_body, _conn} = DecompressBody.read_body(conn, [])
+
+      assert decompressed_body == json_string
+    end
+
+    test "raises Plug.BadRequestError on a malformed gzip body", %{conn: conn} do
+      conn =
+        conn
+        |> put_req_header("content-encoding", "gzip")
+        |> Map.put(:body_params, %{})
+
+      conn = %{
+        conn
+        | adapter: {Plug.Adapters.Test.Conn, %{chunks: ["not gzip"]}}
+      }
+
+      assert_raise Plug.BadRequestError, fn ->
+        DecompressBody.read_body(conn, [])
+      end
+    end
+  end
+end