- Mastodon API: Added an endpoint to get multiple statuses by IDs (`GET /api/v1/statuses/?ids[]=1&ids[]=2`)
- ActivityPub: Added the actor's `discoverable` parameter.
- Admin API: Added moderation log filters (user/start date/end date/search/pagination)
+- Reverse Proxy: Do not retry failed requests to limit pressure on the peer
### Changed
- Configuration: `Filter.AnonymizeFilename` can now retain the original file extension when replacing filenames with custom text (see the sketch below)
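A minimal configuration sketch of this option, assuming the filter is configured under `Pleroma.Upload.Filter.AnonymizeFilename` and that `{extension}` is the placeholder that preserves the original extension:

```elixir
# Hypothetical config.exs entry: filenames in links are replaced by the custom
# text, while "{extension}" keeps the original file extension.
config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename,
  text: "custom-file.{extension}"
```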
build_cachex("scrubber", limit: 2500),
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
- build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10)
+ build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
+ build_cachex("failed_proxy_url", limit: 2500)
]
end
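As a rough sketch of the cache contract the proxy relies on (assuming `build_cachex/2` registers this entry under the runtime name `:failed_proxy_url_cache`, the name used further down): a miss reads back as `{:ok, nil}`, a tracked failure as `{:ok, true}`, and entries may carry an optional TTL.

```elixir
# Illustrative only: expected behaviour of the failed-URL cache.
Cachex.put(:failed_proxy_url_cache, "/status/500", true, ttl: :timer.seconds(60))
Cachex.get(:failed_proxy_url_cache, "/status/500")  #=> {:ok, true}
Cachex.get(:failed_proxy_url_cache, "/never-seen")  #=> {:ok, nil}
```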
@valid_resp_codes [200, 206, 304]
@max_read_duration :timer.seconds(30)
@max_body_length :infinity
+ @failed_request_ttl :timer.seconds(60)
@methods ~w(GET HEAD)
@moduledoc """
* `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to
read from the remote upstream.
+ * `failed_request_ttl` (default `#{inspect(@failed_request_ttl)}` ms): how long a failed request stays cached and cannot be retried.
+
* `inline_content_types`:
* `true` will not alter `content-disposition` (up to the upstream),
* `false` will add `content-disposition: attachment` to any request,
{:keep_user_agent, boolean}
| {:max_read_duration, :timer.time() | :infinity}
| {:max_body_length, non_neg_integer() | :infinity}
+ | {:failed_request_ttl, :timer.time() | :infinity}
| {:http, []}
| {:req_headers, [{String.t(), String.t()}]}
| {:resp_headers, [{String.t(), String.t()}]}
opts
end
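A hedged usage sketch for the new option, assuming it is passed per call like the existing ones (the five-minute value is purely illustrative):

```elixir
# Cache failures for this URL for five minutes before allowing a retry.
Pleroma.ReverseProxy.call(conn, url, failed_request_ttl: :timer.minutes(5))
```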
- with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
+ with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url),
+ {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
:ok <-
header_length_constraint(
headers,
) do
response(conn, client, url, code, headers, opts)
else
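+ # The URL is still in the failed-request cache: answer immediately (error or
+ # redirect) instead of contacting the upstream again.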
+ {:ok, true} ->
+ conn
+ |> error_or_redirect(url, 500, "Request failed", opts)
+ |> halt()
+
{:ok, code, headers} ->
head_response(conn, url, code, headers, opts)
|> halt()
{:error, {:invalid_http_response, code}} ->
Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}")
+ track_failed_url(url, code, opts)
conn
|> error_or_redirect(
{:error, error} ->
Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}")
+ track_failed_url(url, error, opts)
conn
|> error_or_redirect(url, 500, "Request failed", opts)
end
defp client, do: Pleroma.ReverseProxy.Client
+
+ defp track_failed_url(url, code, opts) do
+ code = to_string(code)
+
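+ # 403/404 and 5xx responses are cached with a configurable TTL (default 60s);
+ # any other failure is cached without expiration.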
+ ttl =
+ if code in ["403", "404"] or String.starts_with?(code, "5") do
+ Keyword.get(opts, :failed_request_ttl, @failed_request_ttl)
+ else
+ nil
+ end
+
+ Cachex.put(:failed_proxy_url_cache, url, true, ttl: ttl)
+ end
end
end)
end
+ describe "reverse proxy" do
+ test "do not track successful request", %{conn: conn} do
+ user_agent_mock("hackney/1.15.1", 2)
+ url = "/success"
+
+ conn = ReverseProxy.call(conn, url)
+
+ assert conn.status == 200
+ assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, nil}
+ end
+ end
+
describe "user-agent" do
test "don't keep", %{conn: conn} do
user_agent_mock("hackney/1.15.1", 2)
user_agent_mock("hackney/1.15.1", 0)
assert capture_log(fn ->
- ReverseProxy.call(conn, "/user-agent", max_body_length: 4)
+ ReverseProxy.call(conn, "/huge-file", max_body_length: 4)
end) =~
- "[error] Elixir.Pleroma.ReverseProxy: request to \"/user-agent\" failed: :body_too_large"
+ "[error] Elixir.Pleroma.ReverseProxy: request to \"/huge-file\" failed: :body_too_large"
+
+ assert {:ok, true} == Cachex.get(:failed_proxy_url_cache, "/huge-file")
+
+ assert capture_log(fn ->
+ ReverseProxy.call(conn, "/huge-file", max_body_length: 4)
+ end) == ""
end
defp stream_mock(invokes, with_close? \\ false) do
describe "returns error on" do
test "500", %{conn: conn} do
error_mock(500)
+ url = "/status/500"
- capture_log(fn -> ReverseProxy.call(conn, "/status/500") end) =~
+ capture_log(fn -> ReverseProxy.call(conn, url) end) =~
"[error] Elixir.Pleroma.ReverseProxy: request to /status/500 failed with HTTP status 500"
+
+ assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, true}
+
+ {:ok, ttl} = Cachex.ttl(:failed_proxy_url_cache, url)
+ assert ttl <= 60_000
end
test "400", %{conn: conn} do
error_mock(400)
+ url = "/status/400"
- capture_log(fn -> ReverseProxy.call(conn, "/status/400") end) =~
+ capture_log(fn -> ReverseProxy.call(conn, url) end) =~
"[error] Elixir.Pleroma.ReverseProxy: request to /status/400 failed with HTTP status 400"
+
+ assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, true}
+ assert Cachex.ttl(:failed_proxy_url_cache, url) == {:ok, nil}
+ end
+
+ test "403", %{conn: conn} do
+ error_mock(403)
+ url = "/status/403"
+
+ capture_log(fn ->
+ ReverseProxy.call(conn, url, failed_request_ttl: :timer.seconds(120))
+ end) =~
+ "[error] Elixir.Pleroma.ReverseProxy: request to /status/403 failed with HTTP status 403"
+
+ {:ok, ttl} = Cachex.ttl(:failed_proxy_url_cache, url)
+ assert ttl > 100_000
end
test "204", %{conn: conn} do
- ClientMock
- |> expect(:request, fn :get, "/status/204", _, _, _ -> {:ok, 204, [], %{}} end)
+ url = "/status/204"
+ expect(ClientMock, :request, fn :get, ^url, _, _, _ -> {:ok, 204, [], %{}} end)
capture_log(fn ->
- conn = ReverseProxy.call(conn, "/status/204")
+ conn = ReverseProxy.call(conn, url)
assert conn.resp_body == "Request failed: No Content"
assert conn.halted
end) =~
"[error] Elixir.Pleroma.ReverseProxy: request to \"/status/204\" failed with HTTP status 204"
+
+ assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, true}
+ assert Cachex.ttl(:failed_proxy_url_cache, url) == {:ok, nil}
end
end