# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
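  @moduledoc """
  Fetches a URL and extracts rich media metadata from its HTML using the
  parsers configured under `[:rich_media, :parsers]`.

  Returns `{:ok, map}` with the parsed metadata (including the `:url` key)
  or `{:error, reason}`. Outside of the test environment, results are cached
  in the `:rich_media_cache` Cachex cache.
  """
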
  @options [
    pool: :media,
    max_body: 2_000_000
  ]

  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

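  # In the test environment the cache is bypassed and the URL is parsed
  # directly; otherwise the result is memoized in :rich_media_cache.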
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    def parse(url) do
      try do
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
        |> set_ttl_based_on_image(url)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

  @doc """
  Sets the expiration time (TTL) of the rich media cache entry based on the
  expiration time of the image.

  TTL setters adopt the `Pleroma.Web.RichMedia.Parser.TTL` behaviour.

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL

        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # parse the image URL and return its TTL as Unix time
          parse_ttl_from_url(image_url)
        end
      end

  Then register the module in the config:

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

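  # Asks each configured :ttl_setters module for a TTL, in order. Once a
  # setter returns something other than {:ok, _} (e.g. a numeric TTL or an
  # error), that value is carried through unchanged.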
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

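  # Fetches the URL and runs the parsing pipeline over the returned HTML.
  # The :recv_timeout and :with_body options are Hackney-specific, so they
  # are only merged in when the Hackney adapter is configured.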
  defp parse_url(url) do
    opts =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        Keyword.merge(@options,
          recv_timeout: 2_000,
          with_body: true
        )
      else
        @options
      end

    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)

      html
      |> parse_html()
      |> maybe_parse()
      |> Map.put(:url, url)
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
    end
  end

  defp parse_html(html), do: Floki.parse_document!(html)

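  # Tries each configured parser in turn and halts on the first one that
  # succeeds; parsers that return an error leave the accumulator untouched.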
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        {:ok, data} -> {:halt, data}
        {:error, _msg} -> {:cont, acc}
      end
    end)
  end

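  # The parsed metadata is only considered valid if it contains a non-empty
  # title.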
  defp check_parsed_data(%{title: title} = data)
       when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

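  # Drops any key/value pairs from the parsed data that cannot be encoded as
  # JSON.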
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end