fix formatting
[akkoma] lib/pleroma/web/ostatus/ostatus.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.OStatus do
  import Ecto.Query
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.Activity
  alias Pleroma.HTTP
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.OStatus.DeleteHandler
  alias Pleroma.Web.OStatus.FollowHandler
  alias Pleroma.Web.OStatus.NoteHandler
  alias Pleroma.Web.OStatus.UnfollowHandler
  alias Pleroma.Web.WebFinger
  alias Pleroma.Web.Websub

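  @doc """
  Whether an activity can be rendered as OStatus: only public `Note` activities qualify.
  """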
  def is_representable?(%Activity{} = activity) do
    object = Object.normalize(activity)

    cond do
      is_nil(object) ->
        false

      Visibility.is_public?(activity) && object.data["type"] == "Note" ->
        true

      true ->
        false
    end
  end

  def feed_path(user) do
    "#{user.ap_id}/feed.atom"
  end

  def pubsub_path(user) do
    "#{Web.base_url()}/push/hub/#{user.nickname}"
  end

  def salmon_path(user) do
    "#{user.ap_id}/salmon"
  end

  def remote_follow_path do
    "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
  end

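  @doc """
  Handles an incoming OStatus (Atom) document: marks the author's instance as
  reachable, then turns each `<entry>` into an activity based on its
  ActivityStreams verb or object type.
  """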
  def handle_incoming(xml_string, options \\ []) do
    with doc when doc != :error <- parse_document(xml_string) do
      with {:ok, actor_user} <- find_make_or_update_actor(doc),
           do: Pleroma.Instances.set_reachable(actor_user.ap_id)

      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/unfollow' ->
                with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                         do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                         do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occurred while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(& &1)

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end

  def make_share(entry, doc, retweeted_activity) do
    with {:ok, actor} <- find_make_or_update_actor(doc),
         %Object{} = object <- Object.normalize(retweeted_activity),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def handle_share(entry, doc) do
    with {:ok, retweeted_activity} <- get_or_build_object(entry),
         {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
      {:ok, activity, retweeted_activity}
    else
      e -> {:error, e}
    end
  end

  def make_favorite(entry, doc, favorited_activity) do
    with {:ok, actor} <- find_make_or_update_actor(doc),
         %Object{} = object <- Object.normalize(favorited_activity),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def get_or_build_object(entry) do
    with {:ok, activity} <- get_or_try_fetching(entry) do
      {:ok, activity}
    else
      _e ->
        with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
          NoteHandler.handle_note(object, object)
        end
    end
  end

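  @doc """
  Looks up the activity referenced by the entry's `activity:object` id in the
  database, falling back to fetching it from the object's `text/html` link.
  """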
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id_with_object(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e -> Logger.debug("Couldn't find href: #{inspect(e)}")
        end
    end
  end

  def handle_favorite(entry, doc) do
    with {:ok, favorited_activity} <- get_or_try_fetching(entry),
         {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
      {:ok, activity, favorited_activity}
    else
      e -> {:error, e}
    end
  end

  def get_attachments(entry) do
    :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
    |> Enum.map(fn enclosure ->
      with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
           type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
        %{
          "type" => "Attachment",
          "url" => [
            %{
              "type" => "Link",
              "mediaType" => type,
              "href" => href
            }
          ]
        }
      end
    end)
    |> Enum.filter(& &1)
  end

  @doc """
  Gets the content from an entry.
  """
  def get_content(entry) do
    string_from_xpath("//content", entry)
  end

  @doc """
  Gets the content warning (CW) that Mastodon uses.
  """
  def get_cw(entry) do
    with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
      cw
    else
      _e -> nil
    end
  end

  def get_tags(entry) do
    :xmerl_xpath.string('//category', entry)
    |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
    |> Enum.filter(& &1)
    |> Enum.map(&String.downcase/1)
  end

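  @doc """
  If the author advertises ActivityPub support (`ap_enabled`), upgrades the user
  via the Transmogrifier; otherwise refreshes their OStatus profile data.
  """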
  def maybe_update(doc, user) do
    if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
      Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    else
      maybe_update_ostatus(doc, user)
    end
  end

  def maybe_update_ostatus(doc, user) do
    old_data = %{
      avatar: user.avatar,
      bio: user.bio,
      name: user.name
    }

    with false <- user.local,
         avatar <- make_avatar_object(doc),
         bio <- string_from_xpath("//author[1]/summary", doc),
         name <- string_from_xpath("//author[1]/poco:displayName", doc),
         new_data <- %{
           avatar: avatar || old_data.avatar,
           name: name || old_data.name,
           bio: bio || old_data.bio
         },
         false <- new_data == old_data do
      change = Ecto.Changeset.change(user, new_data)
      User.update_and_set_cache(change)
    else
      _ ->
        {:ok, user}
    end
  end

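  @doc """
  Resolves the document's `//author/uri` to a `User`, creating or refreshing it
  as needed. ActivityPub-enabled actors yield `{:error, :invalid_protocol}`,
  since they should not be handled over OStatus.
  """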
  def find_make_or_update_actor(doc) do
    uri = string_from_xpath("//author/uri[1]", doc)

    with {:ok, %User{} = user} <- find_or_make_user(uri),
         {:ap_enabled, false} <- {:ap_enabled, User.ap_enabled?(user)} do
      maybe_update(doc, user)
    else
      {:ap_enabled, true} ->
        {:error, :invalid_protocol}

      _ ->
        {:error, :unknown_user}
    end
  end

  def find_or_make_user(uri) do
    query = from(user in User, where: user.ap_id == ^uri)

    user = Repo.one(query)

    if is_nil(user) do
      make_user(uri)
    else
      {:ok, user}
    end
  end

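  @doc """
  Builds a remote user from WebFinger and feed data for the given URI. Unless
  `update` is true, an already known user is returned from the cache instead of
  being written again.
  """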
  def make_user(uri, update \\ false) do
    with {:ok, info} <- gather_user_info(uri) do
      data = %{
        name: info["name"],
        nickname: info["nickname"] <> "@" <> info["host"],
        ap_id: info["uri"],
        info: info,
        avatar: info["avatar"],
        bio: info["bio"]
      }

      with false <- update,
           %User{} = user <- User.get_cached_by_ap_id(data.ap_id) do
        {:ok, user}
      else
        _e -> User.insert_or_update_user(data)
      end
    end
  end

  # TODO: Just takes the first one for now.
  def make_avatar_object(author_doc, rel \\ "avatar") do
    href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
    type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

    if href do
      %{
        "type" => "Image",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
    else
      nil
    end
  end

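  @doc """
  Merges the user's WebFinger data with the data gathered from their Atom feed
  (the WebSub topic) and adds the fully qualified name under `"fqn"`.
  """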
  def gather_user_info(username) do
    with {:ok, webfinger_data} <- WebFinger.finger(username),
         {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
      {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
    else
      e ->
        Logger.debug(fn -> "Couldn't gather info for #{username}" end)
        {:error, e}
    end
  end

  # Regex-based 'parsing' so we don't have to pull in a full html parser
  # It's a hack anyway. Maybe revisit this in the future
  @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
  @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
  @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
  def get_atom_url(body) do
    cond do
      Regex.match?(@mastodon_regex, body) ->
        [[_, match]] = Regex.scan(@mastodon_regex, body)
        {:ok, match}

      Regex.match?(@gs_regex, body) ->
        [[_, match]] = Regex.scan(@gs_regex, body)
        {:ok, match}

      Regex.match?(@gs_classic_regex, body) ->
        [[_, match]] = Regex.scan(@gs_classic_regex, body)
        {:ok, match}

      true ->
        Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
        {:error, "Couldn't find the Atom link"}
    end
  end

  def fetch_activity_from_atom_url(url, options \\ []) do
    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           HTTP.get(
             url,
             [{:Accept, "application/atom+xml"}]
           ) do
      Logger.debug("Got document from #{url}, handling...")
      handle_incoming(body, options)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

  def fetch_activity_from_html_url(url, options \\ []) do
    Logger.debug("Trying to fetch #{url}")

    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body}} <- HTTP.get(url, []),
         {:ok, atom_url} <- get_atom_url(body) do
      fetch_activity_from_atom_url(atom_url, options)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

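  @doc """
  Fetches activities from a URL: tries it as an Atom document first and, if that
  fails, falls back to scraping the HTML page for an alternate Atom link.
  """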
  def fetch_activity_from_url(url, options \\ []) do
    with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
      {:ok, activities}
    else
      _e -> fetch_activity_from_html_url(url, options)
    end
  rescue
    e ->
      Logger.debug("Couldn't get #{url}: #{inspect(e)}")
      {:error, "Couldn't get #{url}: #{inspect(e)}"}
  end
end