Merge branch 'develop' into refactor/notification_settings
lib/pleroma/html.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.HTML do
  # Scrubbers are compiled on boot so they can be configured in OTP releases
  # @on_load :compile_scrubbers

  def compile_scrubbers do
    dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")

    dir
    |> Pleroma.Utils.compile_dir()
    |> case do
      {:error, _errors, _warnings} ->
        raise "Compiling scrubbers failed"

      {:ok, _modules, _warnings} ->
        :ok
    end
  end
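
  # Hedged sketch (assumption, not from this file): with the @on_load hook
  # above commented out, compilation has to be triggered explicitly; in
  # Pleroma this is done from the application callback at boot, roughly:
  #
  #     def start(_type, _args) do
  #       # recompile priv/scrubbers at runtime so OTP release users can
  #       # edit scrubbers without rebuilding the project
  #       Pleroma.HTML.compile_scrubbers()
  #       ...
  #     end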

  defp get_scrubbers(scrubber) when is_atom(scrubber), do: [scrubber]
  defp get_scrubbers(scrubbers) when is_list(scrubbers), do: scrubbers
  defp get_scrubbers(_), do: [Pleroma.HTML.Scrubber.Default]

  def get_scrubbers do
    Pleroma.Config.get([:markup, :scrub_policy])
    |> get_scrubbers()
  end
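
  # Illustrative config (key path taken from the call above, values assumed):
  #
  #     config :pleroma, :markup, scrub_policy: Pleroma.HTML.Scrubber.Default
  #
  # yields [Pleroma.HTML.Scrubber.Default]; a list value is passed through
  # unchanged, and any other value falls back to the default scrubber.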

  def filter_tags(html, nil) do
    filter_tags(html, get_scrubbers())
  end

  def filter_tags(html, scrubbers) when is_list(scrubbers) do
    Enum.reduce(scrubbers, html, fn scrubber, html ->
      filter_tags(html, scrubber)
    end)
  end

  def filter_tags(html, scrubber) do
    {:ok, content} = FastSanitize.Sanitizer.scrub(html, scrubber)
    content
  end

  def filter_tags(html), do: filter_tags(html, nil)
  def strip_tags(html), do: filter_tags(html, FastSanitize.Sanitizer.StripTags)
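
  # Illustrative usage (output assumed from the StripTags scrubber's
  # behaviour of removing markup while keeping text nodes):
  #
  #     iex> Pleroma.HTML.strip_tags("<p>hello <b>world</b></p>")
  #     "hello world"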

  def get_cached_scrubbed_html_for_activity(
        content,
        scrubbers,
        activity,
        key \\ "",
        callback \\ fn x -> x end
      ) do
    key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}"

    Cachex.fetch!(:scrubber_cache, key, fn _key ->
      object = Pleroma.Object.normalize(activity)
      ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
    end)
  end
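
  # Hedged example of the key shape (prefix and id are hypothetical): with
  # key "mastoapi:", scrubber Pleroma.HTML.Scrubber.Default and an activity
  # id of "AbCd", the cache key is
  # "mastoapi:Elixir.Pleroma.HTML.Scrubber.Default|AbCd".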

  def get_cached_stripped_html_for_activity(content, activity, key) do
    get_cached_scrubbed_html_for_activity(
      content,
      FastSanitize.Sanitizer.StripTags,
      activity,
      key,
      &HtmlEntities.decode/1
    )
  end
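
  # The callback decodes HTML entities once the tags are gone, e.g.
  # (illustrative) HtmlEntities.decode("&lt;3") returns "<3", so the
  # stripped text reads naturally.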

  def ensure_scrubbed_html(
        content,
        scrubbers,
        fake,
        callback
      ) do
    content =
      content
      |> filter_tags(scrubbers)
      |> callback.()

    if fake do
      {:ignore, content}
    else
      {:commit, content}
    end
  end
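
  # Note: Cachex.fetch!/3 interprets these tuples: {:commit, value} stores
  # the value in the cache, while {:ignore, value} returns it without
  # caching, so content for fake activities is returned but never cached.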

  defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
    generate_scrubber_signature([scrubber])
  end

  defp generate_scrubber_signature(scrubbers) do
    Enum.reduce(scrubbers, "", fn scrubber, signature ->
      "#{signature}#{to_string(scrubber)}"
    end)
  end
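
  # e.g. (illustrative) generate_scrubber_signature([Pleroma.HTML.Scrubber.Default])
  # returns "Elixir.Pleroma.HTML.Scrubber.Default", since to_string/1 on a
  # module atom includes the Elixir. prefix.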

  def extract_first_external_url(_, nil), do: {:error, "No content"}

  def extract_first_external_url(object, content) do
    key = "URL|#{object.id}"

    Cachex.fetch!(:scrubber_cache, key, fn _key ->
      result =
        content
        |> Floki.parse_fragment!()
        |> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
        |> Floki.attribute("a", "href")
        |> Enum.at(0)

      {:commit, {:ok, result}}
    end)
  end
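
  # Hedged example (URLs hypothetical): for content like
  #
  #     ~s(<a class="mention" href="https://example.com/@lain">@lain</a> <a href="https://example.com/post/1">link</a>)
  #
  # the mention link is filtered out and {:ok, "https://example.com/post/1"}
  # is committed to the cache; if no links remain, result is nil.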
end