X-Git-Url: http://git.squeep.com/?a=blobdiff_plain;f=lib%2Fpleroma%2Fweb%2Frich_media%2Fparser.ex;h=e98c743caa7d4be1214b4a1157fc365d2305f571;hb=dccbed856df131900bf7851fbd11c518f85bce7a;hp=ef5ead2daf1fdcad1309591344223c07513f5136;hpb=828841968640f67a06ba8e2d0dd7c38b1d9e3729;p=akkoma

diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex
index ef5ead2da..e98c743ca 100644
--- a/lib/pleroma/web/rich_media/parser.ex
+++ b/lib/pleroma/web/rich_media/parser.ex
@@ -3,10 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.RichMedia.Parser do
-  @options [
-    pool: :media,
-    max_body: 2_000_000
-  ]
+  require Logger
 
   defp parsers do
     Pleroma.Config.get([:rich_media, :parsers])
@@ -15,17 +12,29 @@ defmodule Pleroma.Web.RichMedia.Parser do
   def parse(nil), do: {:error, "No URL provided"}
 
   if Pleroma.Config.get(:env) == :test do
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url), do: parse_url(url)
   else
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url) do
-      try do
-        Cachex.fetch!(:rich_media_cache, url, fn _ ->
-          {:commit, parse_url(url)}
-        end)
-        |> set_ttl_based_on_image(url)
-      rescue
-        e ->
-          {:error, "Cachex error: #{inspect(e)}"}
+      with {:ok, data} <- get_cached_or_parse(url),
+           {:ok, _} <- set_ttl_based_on_image(data, url) do
+        {:ok, data}
+      else
+        error ->
+          Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
+      end
+    end
+
+    defp get_cached_or_parse(url) do
+      case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
+        {:ok, _data} = res ->
+          res
+
+        {:error, _} = e ->
+          ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+          Cachex.expire(:rich_media_cache, url, ttl)
+          e
       end
     end
   end
@@ -52,19 +61,26 @@ defmodule Pleroma.Web.RichMedia.Parser do
       config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
-  def set_ttl_based_on_image({:ok, data}, url) do
-    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
-         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
-      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-      {:ok, data}
-    else
+  @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, Integer.t() | :noop} | {:error, :no_key}
+  def set_ttl_based_on_image(data, url) do
+    case get_ttl_from_image(data, url) do
+      {:ok, ttl} when is_number(ttl) ->
+        ttl = ttl * 1000
+
+        case Cachex.expire_at(:rich_media_cache, url, ttl) do
+          {:ok, true} -> {:ok, ttl}
+          {:ok, false} -> {:error, :no_key}
+        end
+
       _ ->
-        {:ok, data}
+        {:ok, :noop}
     end
   end
 
   defp get_ttl_from_image(data, url) do
-    Pleroma.Config.get([:rich_media, :ttl_setters])
+    [:rich_media, :ttl_setters]
+    |> Pleroma.Config.get()
     |> Enum.reduce({:ok, nil}, fn module, {:ok, _ttl} ->
       module.ttl(data, url)
     end)
@@ -75,33 +91,16 @@ defmodule Pleroma.Web.RichMedia.Parser do
   end
 
   defp parse_url(url) do
-    opts =
-      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
-        Keyword.merge(@options,
-          recv_timeout: 2_000,
-          with_body: true
-        )
-      else
-        @options
-      end
-
-    try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)
-
+    with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
+         {:ok, html} <- Floki.parse_document(html) do
       html
-      |> parse_html()
       |> maybe_parse()
       |> Map.put("url", url)
       |> clean_parsed_data()
      |> check_parsed_data()
-    rescue
-      e ->
-        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
     end
   end
 
-  defp parse_html(html), do: Floki.parse_document!(html)
-
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do