X-Git-Url: http://git.squeep.com/?a=blobdiff_plain;f=lib%2Fpleroma%2Fweb%2Frich_media%2Fparser.ex;h=c70d2fdbade7c66a6f253ecbf2e0d3672523e059;hb=f3a1f9c3bbb7321876a09b3846b5e10ecf4af94f;hp=569249f51ff59f626c378c73e1ca05e981677f30;hpb=a0f5e8b27edbe2224d9c2c3997ad5b8ea484244b;p=akkoma

diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex
index 569249f51..c70d2fdba 100644
--- a/lib/pleroma/web/rich_media/parser.ex
+++ b/lib/pleroma/web/rich_media/parser.ex
@@ -20,31 +20,61 @@ defmodule Pleroma.Web.RichMedia.Parser do
       with {:ok, data} <- get_cached_or_parse(url),
            {:ok, _} <- set_ttl_based_on_image(data, url) do
         {:ok, data}
-      else
-        error ->
-          Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
       end
     end
 
     defp get_cached_or_parse(url) do
-      case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
-        {:ok, _data} = res ->
-          res
-
-        {:error, :body_too_large} = e ->
-          e
-
-        {:error, {:content_type, _}} = e ->
-          e
-
-        # The TTL is not set for the errors above, since they are unlikely to change
-        # with time
-        {:error, _} = e ->
-          ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
-          Cachex.expire(:rich_media_cache, url, ttl)
-          e
+      case Cachex.fetch(:rich_media_cache, url, fn ->
+             case parse_url(url) do
+               {:ok, _} = res ->
+                 {:commit, res}
+
+               {:error, reason} = e ->
+                 # Unfortunately we have to log errors here, instead of doing that
+                 # along with ttl setting at the bottom. Otherwise we can get log spam
+                 # if more than one process was waiting for the rich media card
+                 # while it was generated. Ideally we would set ttl here as well,
+                 # so we don't override it number_of_waiters_on_generation
+                 # times, but one, obviously, can't set ttl for not-yet-created entry
+                 # and Cachex doesn't support returning ttl from the fetch callback.
+                 log_error(url, reason)
+                 {:commit, e}
+             end
+           end) do
+        {action, res} when action in [:commit, :ok] ->
+          case res do
+            {:ok, _data} = res ->
+              res
+
+            {:error, reason} = e ->
+              if action == :commit, do: set_error_ttl(url, reason)
+              e
+          end
+
+        {:error, e} ->
+          {:error, {:cachex_error, e}}
       end
     end
+
+    defp set_error_ttl(_url, :body_too_large), do: :ok
+    defp set_error_ttl(_url, {:content_type, _}), do: :ok
+
+    # The TTL is not set for the errors above, since they are unlikely to change
+    # with time
+
+    defp set_error_ttl(url, _reason) do
+      ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+      Cachex.expire(:rich_media_cache, url, ttl)
+      :ok
+    end
+
+    defp log_error(url, {:invalid_metadata, data}) do
+      Logger.debug(fn -> "Incomplete or invalid metadata for #{url}: #{inspect(data)}" end)
+    end
+
+    defp log_error(url, reason) do
+      Logger.warn(fn -> "Rich media error for #{url}: #{inspect(reason)}" end)
+    end
   end
 
   @doc """
@@ -98,7 +128,7 @@ defmodule Pleroma.Web.RichMedia.Parser do
     end)
   end
 
-  defp parse_url(url) do
+  def parse_url(url) do
     with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
          {:ok, html} <- Floki.parse_document(html) do
       html
@@ -124,7 +154,7 @@ defmodule Pleroma.Web.RichMedia.Parser do
   end
 
   defp check_parsed_data(data) do
-    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
+    {:error, {:invalid_metadata, data}}
   end
 
   defp clean_parsed_data(data) do
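
The rewritten get_cached_or_parse/1 above leans on Cachex's fetch contract: the fallback returns {:commit, value} to store what it computed (or {:ignore, value} to skip storing), callers get {:ok, value} on a cache hit and {:commit, value} when the fallback has just run, and a TTL can only be attached afterwards via Cachex.expire/3, which is why the failure backoff is applied outside the fallback and only when action == :commit. Below is a minimal sketch of that contract, not part of the diff; the :demo_cache name, the example URLs, and the fetch_page wrapper are invented for illustration.

    # Illustration only: :demo_cache and fetch_page are made up; the real module
    # uses :rich_media_cache and parse_url/1 as shown in the hunk above.
    {:ok, _pid} = Cachex.start_link(:demo_cache)

    fetch_page = fn url ->
      case Cachex.fetch(:demo_cache, url, fn ->
             # Stand-in for parse_url/1: succeed for one URL, fail for the rest.
             if url == "https://example.com" do
               {:commit, {:ok, %{"title" => "Example"}}}
             else
               {:commit, {:error, :nxdomain}}
             end
           end) do
        # :ok means the value was already cached; :commit means the fallback just ran.
        {:ok, cached} ->
          cached

        {:commit, {:error, _} = err} ->
          # Keep failures only briefly, mirroring the :failure_backoff handling above.
          Cachex.expire(:demo_cache, url, 60_000)
          err

        {:commit, ok} ->
          ok

        {:error, e} ->
          {:error, {:cachex_error, e}}
      end
    end

    fetch_page.("https://example.com")
    #=> {:ok, %{"title" => "Example"}}   (cached with no TTL)
    fetch_page.("https://unreachable.invalid")
    #=> {:error, :nxdomain}              (cached, but expires after 60_000 ms)

Because the fallback runs inside Cachex, concurrent callers for the same key wait on a single computation, which is the log-spam scenario the in-diff comment describes and the reason logging happens inside the fallback rather than next to the TTL handling.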