# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.OStatus do
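  @moduledoc """
  Support for the OStatus federation protocol: parses incoming Atom documents
  into activities, builds share and favorite activities, and fetches remote
  users and statuses via WebFinger and Atom feeds.
  """
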
  import Ecto.Query
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.Activity
  alias Pleroma.HTTP
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.OStatus.DeleteHandler
  alias Pleroma.Web.OStatus.FollowHandler
  alias Pleroma.Web.OStatus.NoteHandler
  alias Pleroma.Web.OStatus.UnfollowHandler
  alias Pleroma.Web.WebFinger
  alias Pleroma.Web.Websub

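  @doc """
  Checks whether an activity can be represented as an OStatus entry:
  only public activities whose object is a `Note` qualify.
  """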
  def is_representable?(%Activity{} = activity) do
    object = Object.normalize(activity)

    cond do
      is_nil(object) ->
        false

      Visibility.is_public?(activity) && object.data["type"] == "Note" ->
        true

      true ->
        false
    end
  end

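  # URL helpers for the Atom feed, WebSub hub, Salmon endpoint and remote-follow page.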
  def feed_path(user) do
    "#{user.ap_id}/feed.atom"
  end

  def pubsub_path(user) do
    "#{Web.base_url()}/push/hub/#{user.nickname}"
  end

  def salmon_path(user) do
    "#{user.ap_id}/salmon"
  end

  def remote_follow_path do
    "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
  end

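  @doc """
  Parses an incoming Atom document and turns every `<entry>` into an activity.

  Dispatches on the ActivityStreams verb (delete, follow, unfollow, share,
  favorite) and falls back to the object type for notes and comments.
  Returns `{:ok, activities}`, or `{:error, []}` if the document can't be parsed.
  """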
  def handle_incoming(xml_string, options \\ []) do
    with doc when doc != :error <- parse_document(xml_string) do
      with {:ok, actor_user} <- find_make_or_update_user(doc),
           do: Pleroma.Instances.set_reachable(actor_user.ap_id)

      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/unfollow' ->
                with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                         do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                         do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occurred while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(& &1)

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end

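  @doc """
  Builds an Announce activity for a share entry, using the actor from the
  document and the previously fetched (or built) retweeted activity.
  """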
  def make_share(entry, doc, retweeted_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(retweeted_activity),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def handle_share(entry, doc) do
    with {:ok, retweeted_activity} <- get_or_build_object(entry),
         {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
      {:ok, activity, retweeted_activity}
    else
      e -> {:error, e}
    end
  end

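  @doc """
  Builds a Like activity for a favorite entry, pointing at the object of the
  favorited activity.
  """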
  def make_favorite(entry, doc, favorited_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(favorited_activity),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def get_or_build_object(entry) do
    with {:ok, activity} <- get_or_try_fetching(entry) do
      {:ok, activity}
    else
      _e ->
        with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
          NoteHandler.handle_note(object, object)
        end
    end
  end

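  @doc """
  Looks the referenced object up in the database by its AP id; if it isn't
  known yet, tries to fetch it from the entry's `text/html` link.
  """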
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id_with_object(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e -> Logger.debug("Couldn't find href: #{inspect(e)}")
        end
    end
  end

  def handle_favorite(entry, doc) do
    with {:ok, favorited_activity} <- get_or_try_fetching(entry),
         {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
      {:ok, activity, favorited_activity}
    else
      e -> {:error, e}
    end
  end

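  @doc """
  Extracts enclosure links from an entry and maps them to ActivityPub-style
  attachment objects.
  """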
  def get_attachments(entry) do
    :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
    |> Enum.map(fn enclosure ->
      with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
           type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
        %{
          "type" => "Attachment",
          "url" => [
            %{
              "type" => "Link",
              "mediaType" => type,
              "href" => href
            }
          ]
        }
      end
    end)
    |> Enum.filter(& &1)
  end

  @doc """
  Gets the content from an entry.
  """
  def get_content(entry) do
    string_from_xpath("//content", entry)
  end

  @doc """
  Gets the content warning (CW) that Mastodon uses.
  """
  def get_cw(entry) do
    with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
      cw
    else
      _e -> nil
    end
  end

  def get_tags(entry) do
    :xmerl_xpath.string('//category', entry)
    |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
    |> Enum.filter(& &1)
    |> Enum.map(&String.downcase/1)
  end

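  # If the author advertises ActivityPub support (`ap_enabled`), upgrade the user
  # to ActivityPub; otherwise refresh the OStatus profile data (avatar, bio, name).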
  def maybe_update(doc, user) do
    if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
      Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    else
      maybe_update_ostatus(doc, user)
    end
  end

  def maybe_update_ostatus(doc, user) do
    old_data = %{
      avatar: user.avatar,
      bio: user.bio,
      name: user.name
    }

    with false <- user.local,
         avatar <- make_avatar_object(doc),
         bio <- string_from_xpath("//author[1]/summary", doc),
         name <- string_from_xpath("//author[1]/poco:displayName", doc),
         new_data <- %{
           avatar: avatar || old_data.avatar,
           name: name || old_data.name,
           bio: bio || old_data.bio
         },
         false <- new_data == old_data do
      change = Ecto.Changeset.change(user, new_data)
      User.update_and_set_cache(change)
    else
      _ ->
        {:ok, user}
    end
  end

  def find_make_or_update_user(doc) do
    uri = string_from_xpath("//author/uri[1]", doc)

    with {:ok, user} <- find_or_make_user(uri) do
      maybe_update(doc, user)
    end
  end

  def find_or_make_user(uri) do
    query = from(user in User, where: user.ap_id == ^uri)

    user = Repo.one(query)

    if is_nil(user) do
      make_user(uri)
    else
      {:ok, user}
    end
  end

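  @doc """
  Creates a local record for a remote user from the profile data gathered via
  WebFinger and the user's feed. When `update` is true, existing users are
  refreshed instead of returned from the cache.
  """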
  def make_user(uri, update \\ false) do
    with {:ok, info} <- gather_user_info(uri) do
      data = %{
        name: info["name"],
        nickname: info["nickname"] <> "@" <> info["host"],
        ap_id: info["uri"],
        info: info,
        avatar: info["avatar"],
        bio: info["bio"]
      }

      with false <- update,
           %User{} = user <- User.get_cached_by_ap_id(data.ap_id) do
        {:ok, user}
      else
        _e -> User.insert_or_update_user(data)
      end
    end
  end

  # TODO: Just takes the first one for now.
  def make_avatar_object(author_doc, rel \\ "avatar") do
    href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
    type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

    if href do
      %{
        "type" => "Image",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
    else
      nil
    end
  end

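  @doc """
  Merges the user's WebFinger data with the data from their Atom feed and adds
  the fully qualified name under `"fqn"`.
  """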
  def gather_user_info(username) do
    with {:ok, webfinger_data} <- WebFinger.finger(username),
         {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
      {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
    else
      e ->
        Logger.debug(fn -> "Couldn't gather info for #{username}" end)
        {:error, e}
    end
  end

  # Regex-based 'parsing' so we don't have to pull in a full html parser
  # It's a hack anyway. Maybe revisit this in the future
  @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
  @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
  @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
  def get_atom_url(body) do
    cond do
      Regex.match?(@mastodon_regex, body) ->
        [[_, match]] = Regex.scan(@mastodon_regex, body)
        {:ok, match}

      Regex.match?(@gs_regex, body) ->
        [[_, match]] = Regex.scan(@gs_regex, body)
        {:ok, match}

      Regex.match?(@gs_classic_regex, body) ->
        [[_, match]] = Regex.scan(@gs_classic_regex, body)
        {:ok, match}

      true ->
        Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
        {:error, "Couldn't find the Atom link"}
    end
  end

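  # Fetches an Atom document over HTTP and runs it through handle_incoming/2.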
  def fetch_activity_from_atom_url(url, options \\ []) do
    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           HTTP.get(
             url,
             [{:Accept, "application/atom+xml"}]
           ) do
      Logger.debug("Got document from #{url}, handling...")
      handle_incoming(body, options)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

  def fetch_activity_from_html_url(url, options \\ []) do
    Logger.debug("Trying to fetch #{url}")

    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body}} <- HTTP.get(url, []),
         {:ok, atom_url} <- get_atom_url(body) do
      fetch_activity_from_atom_url(atom_url, options)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

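  # First tries the URL as an Atom document; if that yields no activities, falls
  # back to fetching the HTML page and following its alternate Atom link.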
  def fetch_activity_from_url(url, options \\ []) do
    with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
      {:ok, activities}
    else
      _e -> fetch_activity_from_html_url(url, options)
    end
  rescue
    e ->
      Logger.debug("Couldn't get #{url}: #{inspect(e)}")
      {:error, "Couldn't get #{url}: #{inspect(e)}"}
  end
end