Merge branch 'feat/format-optional-migrations' into 'develop'
diff --git a/lib/pleroma/html.ex b/lib/pleroma/html.ex
index 11513106eb643769c72701865459cd9eb70a22dc..c5ece7350247e9fc69e8fa383063cad79d723622 100644
--- a/lib/pleroma/html.ex
+++ b/lib/pleroma/html.ex
@@ -1,11 +1,13 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.HTML do
   # Scrubbers are compiled on boot so they can be configured in OTP releases
   #   @on_load :compile_scrubbers
 
+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
   def compile_scrubbers do
     dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")
 
@@ -56,8 +58,8 @@ defmodule Pleroma.HTML do
       ) do
     key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}"
 
-    Cachex.fetch!(:scrubber_cache, key, fn _key ->
-      object = Pleroma.Object.normalize(activity)
+    @cachex.fetch!(:scrubber_cache, key, fn _key ->
+      object = Pleroma.Object.normalize(activity, fetch: false)
       ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
     end)
   end
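
Note on the @cachex indirection above: instead of calling Cachex directly, the module reads a provider from config at compile time (Pleroma.Config.get([:cachex, :provider], Cachex)) and stores it in a module attribute, so every cache call goes through whatever module is configured. That makes it possible to substitute a stub for the cache in tests. A minimal sketch of such a stub, assuming a hypothetical MyApp.NullCachex module and a test-only config entry (neither is part of this commit):

    defmodule MyApp.NullCachex do
      # Mirrors the Cachex.fetch!/3 call shape used above: skip the cache,
      # run the fallback, and unwrap the {:commit, value} tuple it returns.
      def fetch!(_cache, key, fallback) do
        case fallback.(key) do
          {:commit, value} -> value
          {:ignore, value} -> value
          value -> value
        end
      end
    end

    # config/test.exs (hypothetical): point the provider at the stub so
    # @cachex resolves to it when Pleroma.HTML is compiled.
    config :pleroma, :cachex, provider: MyApp.NullCachex

Because @cachex is a module attribute, the provider is fixed when Pleroma.HTML is compiled, so the config entry has to be in place before compilation rather than changed at runtime.
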
@@ -100,19 +102,27 @@ defmodule Pleroma.HTML do
     end)
   end
 
-  def extract_first_external_url(_, nil), do: {:error, "No content"}
+  def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
+      when is_binary(content) do
+    unless object.data["fake"] do
+      key = "URL|#{object.id}"
 
-  def extract_first_external_url(object, content) do
-    key = "URL|#{object.id}"
+      @cachex.fetch!(:scrubber_cache, key, fn _key ->
+        {:commit, {:ok, extract_first_external_url(content)}}
+      end)
+    else
+      {:ok, extract_first_external_url(content)}
+    end
+  end
 
-    Cachex.fetch!(:scrubber_cache, key, fn _key ->
-      result =
-        content
-        |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
-        |> Floki.attribute("a", "href")
-        |> Enum.at(0)
+  def extract_first_external_url_from_object(_), do: {:error, :no_content}
 
-      {:commit, {:ok, result}}
-    end)
+  def extract_first_external_url(content) do
+    content
+    |> Floki.parse_fragment!()
+    |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+    |> Enum.take(1)
+    |> Floki.attribute("href")
+    |> Enum.at(0)
   end
 end
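
The rewritten URL extraction also changes how links are filtered: instead of removing mention/hashtag/tag anchors with Floki.filter_out/2 and then collecting every remaining href, it parses the fragment and selects the first anchor that is not a mention, hashtag, attachment, or rel="tag" link via a single CSS :not() selector. A rough usage sketch, with made-up content and URLs purely for illustration (and assuming Floki resolves the selector as the new code expects):

    content =
      ~s(<p><a class="mention" href="https://example.social/users/alice">@alice</a> ) <>
        ~s(see <a href="https://example.com/article">this article</a></p>)

    Pleroma.HTML.extract_first_external_url(content)
    # The mention link is skipped by the :not(.mention, ...) selector,
    # so this should return "https://example.com/article"; with no
    # matching anchor at all, Enum.at(0) yields nil.

The cached variant, extract_first_external_url_from_object/1, wraps the same call and returns {:ok, url}, or {:error, :no_content} when the object has no binary "content" field.
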