# Merge branch 'fix/mrf-docs' into 'develop'
# [akkoma] lib/pleroma/web/rich_media/parser.ex
1 # Pleroma: A lightweight social networking server
2 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
3 # SPDX-License-Identifier: AGPL-3.0-only
4
defmodule Pleroma.Web.RichMedia.Parser do
  @moduledoc """
  Fetches a URL, extracts rich-media metadata from the returned HTML via the
  configured parser modules (`[:rich_media, :parsers]`), and caches the result
  in the `:rich_media_cache` Cachex cache.
  """

  # Tight limits on purpose: rich-media fetches are best-effort previews and
  # must not tie up the :media pool or download unbounded response bodies.
  @hackney_options [
    pool: :media,
    recv_timeout: 2_000,
    max_body: 2_000_000,
    with_body: true
  ]

  # Parser modules are read at runtime so config changes take effect without
  # recompilation.
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

  # NOTE: the compile-time env check is intentional — in the test environment
  # the cache is bypassed so every call exercises parse_url/1 directly.
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    @doc """
    Parses rich-media metadata for `url`, caching the result.

    Returns `{:ok, data}` or `{:error, reason}`.
    """
    def parse(url) do
      Cachex.fetch!(:rich_media_cache, url, fn _ ->
        {:commit, parse_url(url)}
      end)
      |> set_ttl_based_on_image(url)
    rescue
      e ->
        {:error, "Cachex error: #{inspect(e)}"}
    end
  end

  @doc """
  Sets the rich media cache expiry based on the expiration time of the image.

  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL
        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # do some parsing in the url and get the ttl of the image
          # and return ttl is unix time
          parse_ttl_from_url(image_url)
        end
      end

  Define the module in the config

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    # Only set an expiry when the entry has none yet and a TTL setter
    # produced a numeric unix timestamp.
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      # Cachex.expire_at expects milliseconds; ttl is unix time in seconds.
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

  # Pass non-{:ok, _} results (notably {:error, reason} from parse_url/1)
  # through unchanged. Without this clause an error tuple raised
  # FunctionClauseError here and was misreported as a "Cachex error" by the
  # rescue in parse/1.
  def set_ttl_based_on_image(data, _url), do: data

  # Runs each configured TTL setter in order until one returns something
  # other than {:ok, _}; a numeric result is the unix expiry time.
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

  # Fetches `url` and runs the parser pipeline. Any exception (HTTP failure,
  # malformed HTML, parser crash) is turned into an {:error, _} tuple — this
  # is a network boundary, so a broad rescue is deliberate.
  defp parse_url(url) do
    {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

    html
    |> parse_html()
    |> maybe_parse()
    |> Map.put(:url, url)
    |> clean_parsed_data()
    |> check_parsed_data()
  rescue
    e ->
      {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
  end

  defp parse_html(html), do: Floki.parse_document!(html)

  # Tries each parser in order, keeping the first successful result.
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        {:ok, data} -> {:halt, data}
        {:error, _msg} -> {:cont, acc}
      end
    end)
  end

  # Metadata is only useful with a non-empty title.
  defp check_parsed_data(%{title: title} = data)
       when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

  # Drops any key/value pair that cannot be JSON-encoded, since the result is
  # later serialized for API responses.
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end