lib/pleroma/web/rich_media/parser.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
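  @moduledoc """
  Parses remote pages for rich media metadata using the parser modules
  configured under `[:rich_media, :parsers]`. Outside of the test
  environment, results are cached per URL in the `:rich_media_cache`
  Cachex cache.
  """

  # HTTP options handed to the hackney adapter when fetching remote pages:
  # use the :media connection pool, give up after 2 seconds and read at
  # most 2 MB of the response body.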
  @hackney_options [
    pool: :media,
    recv_timeout: 2_000,
    max_body: 2_000_000,
    with_body: true
  ]

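  # The parser modules to try, as configured under [:rich_media, :parsers].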
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

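  # Compile-time switch: in the test environment parse_url/1 is called
  # directly (no caching); everywhere else results are cached per URL in
  # the :rich_media_cache Cachex cache. Illustrative shape of a successful
  # result (field values are made up):
  #
  #     Pleroma.Web.RichMedia.Parser.parse("https://example.com/article")
  #     #=> {:ok, %{url: "https://example.com/article", title: "Example", ...}}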
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    def parse(url) do
      try do
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
        |> set_ttl_based_on_image(url)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

  @doc """
  Sets the TTL of the cached rich media entry based on the expiration time of its image.

  To provide a custom TTL, define a module that adopts the
  `Pleroma.Web.RichMedia.Parser.TTL` behaviour.

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL

        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # parse the image URL, determine when the image expires
          # and return that expiration time as a Unix timestamp
          parse_ttl_from_url(image_url)
        end
      end

  Then register the module in the config:

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

  # Error results are passed through untouched so that a parsing failure is
  # not re-raised here and then misreported as a cache error.
  def set_ttl_based_on_image(data, _url), do: data

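  # Asks each module configured under [:rich_media, :ttl_setters] for a TTL;
  # once a setter returns something other than an {:ok, _} tuple (either a
  # TTL value or an error), that result is carried through unchanged.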
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

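  # Fetches the page, runs it through the configured parsers and validates
  # the result; any exception raised along the way is reported as a parsing error.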
  defp parse_url(url) do
    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

      html
      |> maybe_parse()
      |> Map.put(:url, url)
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)}"}
    end
  end

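  # Tries each parser in turn and halts on the first success; if none succeed,
  # an empty map falls through and is rejected by check_parsed_data/1.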
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        {:ok, data} -> {:halt, data}
        {:error, _msg} -> {:cont, acc}
      end
    end)
  end

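  # Parsed data is only considered valid when it contains a non-empty title.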
  defp check_parsed_data(%{title: title} = data) when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

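  # Drops any key/value pairs that cannot be encoded as JSON so the remaining
  # map serialises cleanly.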
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end