Merge develop to bump elixir version in the CI so I don't get failing formatting
[akkoma] / lib / pleroma / web / ostatus / ostatus.ex
1 # Pleroma: A lightweight social networking server
2 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
3 # SPDX-License-Identifier: AGPL-3.0-only
4
5 defmodule Pleroma.Web.OStatus do
6 @httpoison Application.get_env(:pleroma, :httpoison)
7
8 import Ecto.Query
9 import Pleroma.Web.XML
10 require Logger
11
12 alias Pleroma.Activity
13 alias Pleroma.Object
14 alias Pleroma.Repo
15 alias Pleroma.User
16 alias Pleroma.Web
17 alias Pleroma.Web.ActivityPub.ActivityPub
18 alias Pleroma.Web.ActivityPub.Transmogrifier
19 alias Pleroma.Web.OStatus.DeleteHandler
20 alias Pleroma.Web.OStatus.FollowHandler
21 alias Pleroma.Web.OStatus.NoteHandler
22 alias Pleroma.Web.OStatus.UnfollowHandler
23 alias Pleroma.Web.WebFinger
24 alias Pleroma.Web.Websub
25
26 def is_representable?(%Activity{data: data}) do
27 object = Object.normalize(data["object"])
28
29 cond do
30 is_nil(object) ->
31 false
32
33 object.data["type"] == "Note" ->
34 true
35
36 true ->
37 false
38 end
39 end
40
41 def feed_path(user) do
42 "#{user.ap_id}/feed.atom"
43 end
44
45 def pubsub_path(user) do
46 "#{Web.base_url()}/push/hub/#{user.nickname}"
47 end
48
49 def salmon_path(user) do
50 "#{user.ap_id}/salmon"
51 end
52
53 def remote_follow_path do
54 "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
55 end
56
  # Parses an incoming OStatus/Atom XML document and dispatches every <entry>
  # to the handler matching its activity:verb (delete / follow / unfollow /
  # share / favorite) or, failing that, its activity:object-type
  # (note / comment).
  #
  # Returns {:ok, activities} with the successfully handled activities
  # (entries that fail are logged and dropped), or {:error, []} when the
  # document cannot be parsed at all.
  def handle_incoming(xml_string) do
    with doc when doc != :error <- parse_document(xml_string) do
      # Mark the author's instance as reachable for federation bookkeeping;
      # a failure here is deliberately ignored (this `with` has no else).
      with {:ok, actor_user} <- find_make_or_update_user(doc),
           do: Pleroma.Instances.set_reachable(actor_user.ap_id)

      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          # xmerl works with charlists, hence the single-quoted XPath
          # expressions and the charlist verb/object-type values below.
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          # A single failing entry must not abort the whole feed: any raise
          # is logged and the entry maps to nil (filtered out below). Each
          # `with ... do: activity` also falls through to its error value on
          # failure, which is likewise filtered out if falsy.
          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/unfollow' ->
                with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                # Shares/favorites yield two activities: the wrapper and the
                # activity it references.
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                # Unknown verb: fall back to dispatching on the object type.
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occured while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(& &1)

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end
119
120 def make_share(entry, doc, retweeted_activity) do
121 with {:ok, actor} <- find_make_or_update_user(doc),
122 %Object{} = object <- Object.normalize(retweeted_activity.data["object"]),
123 id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
124 {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
125 {:ok, activity}
126 end
127 end
128
129 def handle_share(entry, doc) do
130 with {:ok, retweeted_activity} <- get_or_build_object(entry),
131 {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
132 {:ok, activity, retweeted_activity}
133 else
134 e -> {:error, e}
135 end
136 end
137
138 def make_favorite(entry, doc, favorited_activity) do
139 with {:ok, actor} <- find_make_or_update_user(doc),
140 %Object{} = object <- Object.normalize(favorited_activity.data["object"]),
141 id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
142 {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
143 {:ok, activity}
144 end
145 end
146
147 def get_or_build_object(entry) do
148 with {:ok, activity} <- get_or_try_fetching(entry) do
149 {:ok, activity}
150 else
151 _e ->
152 with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
153 NoteHandler.handle_note(object, object)
154 end
155 end
156 end
157
  # Looks up the Create activity referenced by //activity:object[1]/id in the
  # local database; when absent, falls back to fetching the object's
  # text/html <link> over HTTP. Returns {:ok, activity} on success.
  #
  # NOTE(review): on a failed fetch this returns the result of Logger.debug/1
  # (normally :ok) — callers appear to treat any non-{:ok, _} value as
  # failure, so this works, but it is an easy-to-miss implicit contract.
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        # The remote fetch is expected to yield exactly one activity here;
        # any other shape falls through to the else branch.
        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e -> Logger.debug("Couldn't find href: #{inspect(e)}")
        end
    end
  end
177
178 def handle_favorite(entry, doc) do
179 with {:ok, favorited_activity} <- get_or_try_fetching(entry),
180 {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
181 {:ok, activity, favorited_activity}
182 else
183 e -> {:error, e}
184 end
185 end
186
187 def get_attachments(entry) do
188 :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
189 |> Enum.map(fn enclosure ->
190 with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
191 type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
192 %{
193 "type" => "Attachment",
194 "url" => [
195 %{
196 "type" => "Link",
197 "mediaType" => type,
198 "href" => href
199 }
200 ]
201 }
202 end
203 end)
204 |> Enum.filter(& &1)
205 end
206
207 @doc """
208 Gets the content from a an entry.
209 """
210 def get_content(entry) do
211 string_from_xpath("//content", entry)
212 end
213
214 @doc """
215 Get the cw that mastodon uses.
216 """
217 def get_cw(entry) do
218 with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
219 cw
220 else
221 _e -> nil
222 end
223 end
224
225 def get_tags(entry) do
226 :xmerl_xpath.string('//category', entry)
227 |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
228 |> Enum.filter(& &1)
229 |> Enum.map(&String.downcase/1)
230 end
231
232 def maybe_update(doc, user) do
233 if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
234 Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
235 else
236 maybe_update_ostatus(doc, user)
237 end
238 end
239
240 def maybe_update_ostatus(doc, user) do
241 old_data = %{
242 avatar: user.avatar,
243 bio: user.bio,
244 name: user.name
245 }
246
247 with false <- user.local,
248 avatar <- make_avatar_object(doc),
249 bio <- string_from_xpath("//author[1]/summary", doc),
250 name <- string_from_xpath("//author[1]/poco:displayName", doc),
251 new_data <- %{
252 avatar: avatar || old_data.avatar,
253 name: name || old_data.name,
254 bio: bio || old_data.bio
255 },
256 false <- new_data == old_data do
257 change = Ecto.Changeset.change(user, new_data)
258 User.update_and_set_cache(change)
259 else
260 _ ->
261 {:ok, user}
262 end
263 end
264
265 def find_make_or_update_user(doc) do
266 uri = string_from_xpath("//author/uri[1]", doc)
267
268 with {:ok, user} <- find_or_make_user(uri) do
269 maybe_update(doc, user)
270 end
271 end
272
273 def find_or_make_user(uri) do
274 query = from(user in User, where: user.ap_id == ^uri)
275
276 user = Repo.one(query)
277
278 if is_nil(user) do
279 make_user(uri)
280 else
281 {:ok, user}
282 end
283 end
284
285 def make_user(uri, update \\ false) do
286 with {:ok, info} <- gather_user_info(uri) do
287 data = %{
288 name: info["name"],
289 nickname: info["nickname"] <> "@" <> info["host"],
290 ap_id: info["uri"],
291 info: info,
292 avatar: info["avatar"],
293 bio: info["bio"]
294 }
295
296 with false <- update,
297 %User{} = user <- User.get_by_ap_id(data.ap_id) do
298 {:ok, user}
299 else
300 _e -> User.insert_or_update_user(data)
301 end
302 end
303 end
304
305 # TODO: Just takes the first one for now.
306 def make_avatar_object(author_doc, rel \\ "avatar") do
307 href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
308 type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)
309
310 if href do
311 %{
312 "type" => "Image",
313 "url" => [
314 %{
315 "type" => "Link",
316 "mediaType" => type,
317 "href" => href
318 }
319 ]
320 }
321 else
322 nil
323 end
324 end
325
326 def gather_user_info(username) do
327 with {:ok, webfinger_data} <- WebFinger.finger(username),
328 {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
329 {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
330 else
331 e ->
332 Logger.debug(fn -> "Couldn't gather info for #{username}" end)
333 {:error, e}
334 end
335 end
336
337 # Regex-based 'parsing' so we don't have to pull in a full html parser
338 # It's a hack anyway. Maybe revisit this in the future
339 @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
340 @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
341 @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
342 def get_atom_url(body) do
343 cond do
344 Regex.match?(@mastodon_regex, body) ->
345 [[_, match]] = Regex.scan(@mastodon_regex, body)
346 {:ok, match}
347
348 Regex.match?(@gs_regex, body) ->
349 [[_, match]] = Regex.scan(@gs_regex, body)
350 {:ok, match}
351
352 Regex.match?(@gs_classic_regex, body) ->
353 [[_, match]] = Regex.scan(@gs_classic_regex, body)
354 {:ok, match}
355
356 true ->
357 Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
358 {:error, "Couldn't find the Atom link"}
359 end
360 end
361
362 def fetch_activity_from_atom_url(url) do
363 with true <- String.starts_with?(url, "http"),
364 {:ok, %{body: body, status: code}} when code in 200..299 <-
365 @httpoison.get(
366 url,
367 [{:Accept, "application/atom+xml"}]
368 ) do
369 Logger.debug("Got document from #{url}, handling...")
370 handle_incoming(body)
371 else
372 e ->
373 Logger.debug("Couldn't get #{url}: #{inspect(e)}")
374 e
375 end
376 end
377
378 def fetch_activity_from_html_url(url) do
379 Logger.debug("Trying to fetch #{url}")
380
381 with true <- String.starts_with?(url, "http"),
382 {:ok, %{body: body}} <- @httpoison.get(url, []),
383 {:ok, atom_url} <- get_atom_url(body) do
384 fetch_activity_from_atom_url(atom_url)
385 else
386 e ->
387 Logger.debug("Couldn't get #{url}: #{inspect(e)}")
388 e
389 end
390 end
391
392 def fetch_activity_from_url(url) do
393 with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do
394 {:ok, activities}
395 else
396 _e -> fetch_activity_from_html_url(url)
397 end
398 rescue
399 e ->
400 Logger.debug("Couldn't get #{url}: #{inspect(e)}")
401 {:error, "Couldn't get #{url}: #{inspect(e)}"}
402 end
403 end