# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTML do
  # Scrubbers are compiled on boot so they can be configured in OTP releases
  #  @on_load :compile_scrubbers

  # Cache provider is configurable (e.g. a mock in tests); defaults to Cachex.
  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

  @doc """
  Compiles the scrubber modules found in `priv/scrubbers`.

  Scrubbers are compiled at boot rather than build time so they can be
  customized in OTP releases. Raises if compilation fails — sanitization
  must never silently degrade.
  """
  def compile_scrubbers do
    dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")

    dir
    |> Pleroma.Utils.compile_dir()
    |> case do
      {:error, _errors, _warnings} ->
        raise "Compiling scrubbers failed"

      {:ok, _modules, _warnings} ->
        :ok
    end
  end
25 defp get_scrubbers(scrubber) when is_atom(scrubber), do: [scrubber]
26 defp get_scrubbers(scrubbers) when is_list(scrubbers), do: scrubbers
27 defp get_scrubbers(_), do: [Pleroma.HTML.Scrubber.Default]
30 Pleroma.Config.get([:markup, :scrub_policy])
34 def filter_tags(html, nil) do
35 filter_tags(html, get_scrubbers())
38 def filter_tags(html, scrubbers) when is_list(scrubbers) do
39 Enum.reduce(scrubbers, html, fn scrubber, html ->
40 filter_tags(html, scrubber)
44 def filter_tags(html, scrubber) do
45 {:ok, content} = FastSanitize.Sanitizer.scrub(html, scrubber)
49 def filter_tags(html), do: filter_tags(html, nil)
50 def strip_tags(html), do: filter_tags(html, FastSanitize.Sanitizer.StripTags)
52 def get_cached_scrubbed_html_for_activity(
57 callback \\ fn x -> x end
59 key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}"
61 @cachex.fetch!(:scrubber_cache, key, fn _key ->
62 object = Pleroma.Object.normalize(activity, fetch: false)
63 ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
67 def get_cached_stripped_html_for_activity(content, activity, key) do
68 get_cached_scrubbed_html_for_activity(
70 FastSanitize.Sanitizer.StripTags,
73 &HtmlEntities.decode/1
77 def ensure_scrubbed_html(
85 |> filter_tags(scrubbers)
95 defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
96 generate_scrubber_signature([scrubber])
99 defp generate_scrubber_signature(scrubbers) do
100 Enum.reduce(scrubbers, "", fn scrubber, signature ->
101 "#{signature}#{to_string(scrubber)}"
105 def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
106 when is_binary(content) do
107 unless object.data["fake"] do
108 key = "URL|#{object.id}"
110 @cachex.fetch!(:scrubber_cache, key, fn _key ->
111 {:commit, {:ok, extract_first_external_url(content)}}
114 {:ok, extract_first_external_url(content)}
118 def extract_first_external_url_from_object(_), do: {:error, :no_content}
120 def extract_first_external_url(content) do
122 |> Floki.parse_fragment!()
123 |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
125 |> Floki.attribute("href")