Merge branch 'issue/1342' into 'develop'
lib/pleroma/html.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.HTML do
  # Scrubbers are compiled on boot so they can be configured in OTP releases
  #  @on_load :compile_scrubbers

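  # Compiles every module found in priv/scrubbers at runtime. A sketch of
  # the assumed call site (not shown in this file): the application start
  # callback would run it once before any HTML is filtered, e.g.
  #
  #     # hypothetical call site, e.g. in Pleroma.Application.start/2
  #     Pleroma.HTML.compile_scrubbers()
  #
  # Kernel.ParallelCompiler.compile/1 returns {:ok, modules, warnings} or
  # {:error, errors, warnings}, which the case expression below matches on.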
  def compile_scrubbers do
    dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")

    dir
    |> File.ls!()
    |> Enum.map(&Path.join(dir, &1))
    |> Kernel.ParallelCompiler.compile()
    |> case do
      {:error, _errors, _warnings} ->
        raise "Compiling scrubbers failed"

      {:ok, _modules, _warnings} ->
        :ok
    end
  end

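  # Normalizes the configured scrub policy: a single module becomes a
  # one-element list, a list passes through, and anything else falls back
  # to the default scrubber.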
  defp get_scrubbers(scrubber) when is_atom(scrubber), do: [scrubber]
  defp get_scrubbers(scrubbers) when is_list(scrubbers), do: scrubbers
  defp get_scrubbers(_), do: [Pleroma.HTML.Scrubber.Default]

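  # Reads the scrub policy from config. A minimal config sketch (the module
  # name matches the default fallback above; adjust to your own policy):
  #
  #     config :pleroma, :markup,
  #       scrub_policy: Pleroma.HTML.Scrubber.Default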
  def get_scrubbers do
    Pleroma.Config.get([:markup, :scrub_policy])
    |> get_scrubbers()
  end

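  # filter_tags/2 dispatches on its second argument: nil falls back to the
  # configured scrubbers, a list is folded left to right (each scrubber sees
  # the previous one's output), and a single scrubber module is applied
  # directly via FastSanitize.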
  def filter_tags(html, nil) do
    filter_tags(html, get_scrubbers())
  end

  def filter_tags(html, scrubbers) when is_list(scrubbers) do
    Enum.reduce(scrubbers, html, fn scrubber, html ->
      filter_tags(html, scrubber)
    end)
  end

  def filter_tags(html, scrubber) do
    {:ok, content} = FastSanitize.Sanitizer.scrub(html, scrubber)
    content
  end

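  # Hedged examples (exact output depends on the scrubber implementations):
  #
  #     filter_tags("<p>hi</p>")  # scrubbed with the configured policy
  #     strip_tags("<p>hi</p>")   #=> "hi" (all markup removed)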
  def filter_tags(html), do: filter_tags(html, nil)
  def strip_tags(html), do: filter_tags(html, FastSanitize.Sanitizer.StripTags)

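  # Caches scrubbed HTML per activity in the :scrubber_cache Cachex cache.
  # The key embeds the scrubber signature, so changing the scrub policy
  # naturally misses old entries. An illustrative key (made-up caller key
  # and activity id):
  #
  #     "mastoapi:contentElixir.Pleroma.HTML.Scrubber.Default|9y8k..."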
  def get_cached_scrubbed_html_for_activity(
        content,
        scrubbers,
        activity,
        key \\ "",
        callback \\ fn x -> x end
      ) do
    key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}"

    Cachex.fetch!(:scrubber_cache, key, fn _key ->
      object = Pleroma.Object.normalize(activity)
      ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
    end)
  end

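  # Convenience wrapper: strips all markup, then decodes HTML entities
  # ("&amp;" -> "&" and so on) so the cached value is plain text.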
  def get_cached_stripped_html_for_activity(content, activity, key) do
    get_cached_scrubbed_html_for_activity(
      content,
      FastSanitize.Sanitizer.StripTags,
      activity,
      key,
      &HtmlEntities.decode/1
    )
  end

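  # Produces the tuple Cachex.fetch!/3 expects from its fallback function:
  # {:commit, value} stores the result, {:ignore, value} returns it without
  # caching. Fake objects (presumably previews) are thus never cached.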
  def ensure_scrubbed_html(
        content,
        scrubbers,
        fake,
        callback
      ) do
    content =
      content
      |> filter_tags(scrubbers)
      |> callback.()

    if fake do
      {:ignore, content}
    else
      {:commit, content}
    end
  end

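  # Builds the cache-key fragment by concatenating module names, e.g.:
  #
  #     generate_scrubber_signature([Pleroma.HTML.Scrubber.Default])
  #     #=> "Elixir.Pleroma.HTML.Scrubber.Default"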
  defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
    generate_scrubber_signature([scrubber])
  end

  defp generate_scrubber_signature(scrubbers) do
    Enum.reduce(scrubbers, "", fn scrubber, signature ->
      "#{signature}#{to_string(scrubber)}"
    end)
  end

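  # Returns the first link in the content that is not a mention, hashtag, or
  # rel="tag" link, cached per object. A hedged example:
  #
  #     extract_first_external_url(object, ~s(<a href="https://example.com">x</a>))
  #     #=> {:ok, "https://example.com"}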
  def extract_first_external_url(_, nil), do: {:error, "No content"}

  def extract_first_external_url(object, content) do
    key = "URL|#{object.id}"

    Cachex.fetch!(:scrubber_cache, key, fn _key ->
      result =
        content
        |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
        |> Floki.attribute("a", "href")
        |> Enum.at(0)

      {:commit, {:ok, result}}
    end)
  end
end