1 # Pleroma: A lightweight social networking server
2 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
3 # SPDX-License-Identifier: AGPL-3.0-only
# Pleroma.HTML — helpers for scrubbing user-supplied HTML through
# HtmlSanitizeEx scrubber policies and caching the scrubbed results
# (see the Cachex.fetch!/3 calls below).
5 defmodule Pleroma.HTML do
6 alias HtmlSanitizeEx.Scrubber
# Normalize the configured scrub policy into a list of scrubber modules:
# a lone module atom is wrapped in a list, a list passes through as-is,
# and any other value falls back to the default scrubber.
defp get_scrubbers(scrubber) when is_atom(scrubber) do
  [scrubber]
end

defp get_scrubbers(scrubbers) when is_list(scrubbers) do
  scrubbers
end

defp get_scrubbers(_other) do
  [Pleroma.HTML.Scrubber.Default]
end
# Reads the configured scrub policy from application config
# ([:markup, :scrub_policy]).
# NOTE(review): this clause is truncated in this excerpt — the original
# presumably normalizes the value via get_scrubbers/1 and closes with
# `end`; confirm against the full file.
12 def get_scrubbers() do
13 Pleroma.Config.get([:markup, :scrub_policy])
# When no scrubber is given, fall back to the configured scrub policy.
# NOTE(review): truncated — the closing `end` is missing from this excerpt.
17 def filter_tags(html, nil) do
18 filter_tags(html, get_scrubbers())
# Applies each scrubber in order, threading the HTML through the list.
# NOTE(review): truncated — the `end)` / `end` closing lines are missing
# from this excerpt.
21 def filter_tags(html, scrubbers) when is_list(scrubbers) do
22 Enum.reduce(scrubbers, html, fn scrubber, html ->
23 filter_tags(html, scrubber)
# Scrub `html` through a single HtmlSanitizeEx scrubber module.
def filter_tags(html, scrubber) do
  Scrubber.scrub(html, scrubber)
end
# One-argument convenience: scrub with the configured policy
# (delegates to the `nil` clause of filter_tags/2).
def filter_tags(html) do
  filter_tags(html, nil)
end
# Strip all markup, keeping only the text content.
def strip_tags(html) do
  Scrubber.scrub(html, Scrubber.StripTags)
end
# Returns scrubbed `content` for `object`, memoized in :scrubber_cache.
# The cache key combines the calling module, a signature of the scrubber
# list, and the object id, so different policies do not collide.
# NOTE(review): the closing `end` is missing from this excerpt.
31 def get_cached_scrubbed_html_for_object(content, scrubbers, object, module) do
32 key = "#{module}#{generate_scrubber_signature(scrubbers)}|#{object.id}"
33 Cachex.fetch!(:scrubber_cache, key, fn _key -> ensure_scrubbed_html(content, scrubbers) end)
# Cached variant of strip_tags/1: delegates to the scrubbed-HTML cache
# using the StripTags scrubber as the policy.
# NOTE(review): truncated — the `content`/`object`/`module` argument
# lines and the closing lines of this call are missing from this excerpt.
36 def get_cached_stripped_html_for_object(content, object, module) do
37 get_cached_scrubbed_html_for_object(
39 HtmlSanitizeEx.Scrubber.StripTags,
# Scrubs `content` with `scrubbers` and wraps the result in a
# {:commit, _} tuple so Cachex stores it on a cache miss.
# NOTE(review): the parameter list (and possibly sibling clauses) of
# this function are missing from this excerpt.
45 def ensure_scrubbed_html(
49 {:commit, filter_tags(content, scrubbers)}
# A single scrubber module is signed as a one-element list.
# NOTE(review): the closing `end` is missing from this excerpt.
52 defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
53 generate_scrubber_signature([scrubber])
# Concatenates the scrubber module names into one string, used as part
# of the scrubbed-HTML cache key.
# NOTE(review): closing lines are missing here; Enum.map_join/3 would be
# the more idiomatic form of this reduce.
56 defp generate_scrubber_signature(scrubbers) do
57 Enum.reduce(scrubbers, "", fn scrubber, signature ->
58 "#{signature}#{to_string(scrubber)}"
# Finds the first link href in `content`, skipping mention links
# ("a.mention"), cached per object under the key "URL|<id>".
# NOTE(review): several lines (the HTML parsing step feeding this
# pipeline, extraction of `result`, and the closing `end`s) are missing
# from this excerpt; the comments describe only what is visible.
62 def extract_first_external_url(object, content) do
63 key = "URL|#{object.id}"
65 Cachex.fetch!(:scrubber_cache, key, fn _key ->
68 |> Floki.filter_out("a.mention")
69 |> Floki.attribute("a", "href")
72 {:commit, {:ok, result}}
# Scrubber policy allowing only a minimal, twitter-like subset of HTML:
# links, paragraphs, line breaks, spans and (behind a config flag)
# inline images.
77 defmodule Pleroma.HTML.Scrubber.TwitterText do
# NOTE(review): the @moduledoc """ delimiters around this text are
# missing from this excerpt.
79 An HTML scrubbing policy which limits to twitter-style text. Only
80 paragraphs, breaks and links are allowed through the filter.
# Compile-time snapshot of markup/URI-scheme config.
# NOTE(review): Application.get_env at module scope freezes config at
# compile time; modern Elixir would use Application.compile_env/3 —
# confirm whether that is intended here.
83 @markup Application.get_env(:pleroma, :markup)
84 @uri_schemes Application.get_env(:pleroma, :uri_schemes, [])
85 @valid_schemes Keyword.get(@uri_schemes, :valid_schemes, [])
87 require HtmlSanitizeEx.Scrubber.Meta
88 alias HtmlSanitizeEx.Scrubber.Meta
90 Meta.remove_cdata_sections_before_scrub()
# Links: href restricted to the configured valid URI schemes.
94 Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
95 Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"])
97 # paragraphs and linebreaks
98 Meta.allow_tag_with_these_attributes("br", [])
99 Meta.allow_tag_with_these_attributes("p", [])
102 Meta.allow_tag_with_these_attributes("span", ["class"])
104 # allow inline images for custom emoji
105 @allow_inline_images Keyword.get(@markup, :allow_inline_images)
107 if @allow_inline_images do
108 # restrict img tags to http/https only, because of MediaProxy.
109 Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"])
# NOTE(review): the img attribute list and this `if`'s closing `end` are
# missing from this excerpt.
111 Meta.allow_tag_with_these_attributes("img", [
# Everything not explicitly allowed above is stripped.
119 Meta.strip_everything_not_covered()
# The permissive default scrubbing policy: links, basic formatting,
# lists, quotes, code, and — behind config flags — inline images,
# tables, headings and fonts.
122 defmodule Pleroma.HTML.Scrubber.Default do
# NOTE(review): this is likely meant to be @moduledoc, and the string
# appears truncated ("no ") — confirm against the full source.
123 @doc "The default HTML scrubbing policy: no "
125 require HtmlSanitizeEx.Scrubber.Meta
126 alias HtmlSanitizeEx.Scrubber.Meta
# Compile-time snapshot of markup/URI-scheme config (same pattern as
# the TwitterText scrubber).
128 @markup Application.get_env(:pleroma, :markup)
129 @uri_schemes Application.get_env(:pleroma, :uri_schemes, [])
130 @valid_schemes Keyword.get(@uri_schemes, :valid_schemes, [])
132 Meta.remove_cdata_sections_before_scrub()
133 Meta.strip_comments()
# Links: href restricted to the configured valid URI schemes.
135 Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
136 Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"])
138 Meta.allow_tag_with_these_attributes("abbr", ["title"])
# Basic inline/block formatting tags, attribute-free.
140 Meta.allow_tag_with_these_attributes("b", [])
141 Meta.allow_tag_with_these_attributes("blockquote", [])
142 Meta.allow_tag_with_these_attributes("br", [])
143 Meta.allow_tag_with_these_attributes("code", [])
144 Meta.allow_tag_with_these_attributes("del", [])
145 Meta.allow_tag_with_these_attributes("em", [])
146 Meta.allow_tag_with_these_attributes("i", [])
147 Meta.allow_tag_with_these_attributes("li", [])
148 Meta.allow_tag_with_these_attributes("ol", [])
149 Meta.allow_tag_with_these_attributes("p", [])
150 Meta.allow_tag_with_these_attributes("pre", [])
151 Meta.allow_tag_with_these_attributes("span", ["class"])
152 Meta.allow_tag_with_these_attributes("strong", [])
153 Meta.allow_tag_with_these_attributes("u", [])
154 Meta.allow_tag_with_these_attributes("ul", [])
# Optional inline images (custom emoji).
156 @allow_inline_images Keyword.get(@markup, :allow_inline_images)
158 if @allow_inline_images do
159 # restrict img tags to http/https only, because of MediaProxy.
160 Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"])
# NOTE(review): the img attribute list and this `if`'s closing `end` are
# missing from this excerpt.
162 Meta.allow_tag_with_these_attributes("img", [
# Optional table support.
# NOTE(review): the `if @allow_tables do` opener appears to be missing
# from this excerpt.
170 @allow_tables Keyword.get(@markup, :allow_tables)
173 Meta.allow_tag_with_these_attributes("table", [])
174 Meta.allow_tag_with_these_attributes("tbody", [])
175 Meta.allow_tag_with_these_attributes("td", [])
176 Meta.allow_tag_with_these_attributes("th", [])
177 Meta.allow_tag_with_these_attributes("thead", [])
178 Meta.allow_tag_with_these_attributes("tr", [])
# Optional heading support (h1-h5).
181 @allow_headings Keyword.get(@markup, :allow_headings)
183 if @allow_headings do
184 Meta.allow_tag_with_these_attributes("h1", [])
185 Meta.allow_tag_with_these_attributes("h2", [])
186 Meta.allow_tag_with_these_attributes("h3", [])
187 Meta.allow_tag_with_these_attributes("h4", [])
188 Meta.allow_tag_with_these_attributes("h5", [])
# Optional <font face="..."> support.
# NOTE(review): the `if @allow_fonts do` opener and several closing
# `end`s are missing from this excerpt.
191 @allow_fonts Keyword.get(@markup, :allow_fonts)
194 Meta.allow_tag_with_these_attributes("font", ["face"])
# Strip anything not explicitly allowed above.
197 Meta.strip_everything_not_covered()
# Scrubber transform: rewrites inline <img> src URIs during scrubbing
# (see before_scrub/1, scrub_attribute/2 and scrub/1 below).
200 defmodule Pleroma.HTML.Transform.MediaProxy do
201 @moduledoc "Transforms inline image URIs to use MediaProxy."
203 alias Pleroma.Web.MediaProxy
# No pre-processing is needed before scrubbing; hand the document back
# unchanged.
def before_scrub(html) do
  html
end
# Matches remote image sources (src beginning with "http").
# NOTE(review): the clause body (presumably the MediaProxy URL rewrite,
# given the module alias) and its closing `end` are missing from this
# excerpt — confirm against the full source.
207 def scrub_attribute("img", {"src", "http" <> target}) do
# Attributes on every other tag pass through untouched.
def scrub_attribute(_tag, attribute) do
  attribute
end
# Runs every attribute of an <img> node through scrub_attribute/2 and
# drops any that become nil, then rebuilds the node.
# NOTE(review): the start of this pipeline (the expression rebinding
# `attributes`) and the clause's closing `end` are missing from this
# excerpt.
217 def scrub({"img", attributes, children}) do
220 |> Enum.map(fn attr -> scrub_attribute("img", attr) end)
221 |> Enum.reject(&is_nil(&1))
223 {"img", attributes, children}
# Drop HTML comments entirely.
def scrub({:comment, _children}) do
  ""
end

# Any other element node passes through unchanged.
def scrub({tag, attributes, children}) do
  {tag, attributes, children}
end

# Two-element nodes are unwrapped to their children.
def scrub({_tag, children}) do
  children
end

# Plain text is preserved as-is.
def scrub(text) do
  text
end