Merge branch 'hj-happiness-improvement' into 'develop'
lib/pleroma/web/ostatus/ostatus.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.OStatus do
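  @moduledoc """
  Support for the legacy OStatus federation protocol: parsing incoming Atom
  documents, dispatching entries to verb-specific handlers, and building or
  refreshing remote users from WebFinger and feed data.

  Illustrative usage (the URL is a placeholder, not a real endpoint):

      {:ok, activities} = Pleroma.Web.OStatus.fetch_activity_from_url("https://remote.example/notice/123")
  """
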
  @httpoison Application.get_env(:pleroma, :httpoison)

  import Ecto.Query
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.{Repo, User, Web, Object, Activity}
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.{WebFinger, Websub}
  alias Pleroma.Web.OStatus.{FollowHandler, UnfollowHandler, NoteHandler, DeleteHandler}
  alias Pleroma.Web.ActivityPub.Transmogrifier

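  # An activity can be represented as OStatus only if its object resolves and
  # is a Note.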
  def is_representable?(%Activity{data: data}) do
    object = Object.normalize(data["object"])

    cond do
      is_nil(object) ->
        false

      object.data["type"] == "Note" ->
        true

      true ->
        false
    end
  end

  def feed_path(user) do
    "#{user.ap_id}/feed.atom"
  end

  def pubsub_path(user) do
    "#{Web.base_url()}/push/hub/#{user.nickname}"
  end

  def salmon_path(user) do
    "#{user.ap_id}/salmon"
  end

  def remote_follow_path do
    "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
  end

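  # Parses an incoming Atom document and handles every <entry> it contains,
  # dispatching on the ActivityStreams 1.0 verb (and, in the default case, on
  # the object type). Returns {:ok, activities} with the activities that could
  # be handled; entries that raise are logged and dropped.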
  def handle_incoming(xml_string) do
    with doc when doc != :error <- parse_document(xml_string) do
      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/unfollow' ->
                with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occurred while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(& &1)

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end

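  # Builds an Announce activity for an incoming share (repeat/boost) of an
  # already-known activity's object.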
  def make_share(entry, doc, retweeted_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(retweeted_activity.data["object"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def handle_share(entry, doc) do
    with {:ok, retweeted_activity} <- get_or_build_object(entry),
         {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
      {:ok, activity, retweeted_activity}
    else
      e -> {:error, e}
    end
  end

  def make_favorite(entry, doc, favorited_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(favorited_activity.data["object"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def get_or_build_object(entry) do
    with {:ok, activity} <- get_or_try_fetching(entry) do
      {:ok, activity}
    else
      _e ->
        with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
          NoteHandler.handle_note(object, object)
        end
    end
  end

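  # Looks for the referenced object in the database first (by its
  # ActivityStreams id) and, failing that, tries to fetch it from the entry's
  # text/html link.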
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e ->
            Logger.debug("Couldn't find href: #{inspect(e)}")
            {:error, e}
        end
    end
  end

  def handle_favorite(entry, doc) do
    with {:ok, favorited_activity} <- get_or_try_fetching(entry),
         {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
      {:ok, activity, favorited_activity}
    else
      e -> {:error, e}
    end
  end

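  # Maps the entry's enclosure links (rel="enclosure") to ActivityStreams
  # attachment maps; links missing an href or media type are skipped.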
  def get_attachments(entry) do
    :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
    |> Enum.map(fn enclosure ->
      with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
           type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
        %{
          "type" => "Attachment",
          "url" => [
            %{
              "type" => "Link",
              "mediaType" => type,
              "href" => href
            }
          ]
        }
      end
    end)
    |> Enum.filter(& &1)
  end

  @doc """
  Gets the content from an entry.
  """
  def get_content(entry) do
    string_from_xpath("//content", entry)
  end

  @doc """
  Gets the content warning (CW) that Mastodon puts in the entry summary.
  """
  def get_cw(entry) do
    with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
      cw
    else
      _e -> nil
    end
  end

  def get_tags(entry) do
    :xmerl_xpath.string('//category', entry)
    |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
    |> Enum.filter(& &1)
    |> Enum.map(&String.downcase/1)
  end

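  # If the author advertises ActivityPub support (an ap_enabled element set to
  # "true"), upgrade the user to ActivityPub via the Transmogrifier; otherwise
  # refresh the profile fields from the OStatus document.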
  def maybe_update(doc, user) do
    if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
      Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    else
      maybe_update_ostatus(doc, user)
    end
  end

  def maybe_update_ostatus(doc, user) do
    old_data = %{
      avatar: user.avatar,
      bio: user.bio,
      name: user.name
    }

    with false <- user.local,
         avatar <- make_avatar_object(doc),
         bio <- string_from_xpath("//author[1]/summary", doc),
         name <- string_from_xpath("//author[1]/poco:displayName", doc),
         new_data <- %{
           avatar: avatar || old_data.avatar,
           name: name || old_data.name,
           bio: bio || old_data.bio
         },
         false <- new_data == old_data do
      change = Ecto.Changeset.change(user, new_data)
      User.update_and_set_cache(change)
    else
      _ ->
        {:ok, user}
    end
  end

  def find_make_or_update_user(doc) do
    uri = string_from_xpath("//author/uri[1]", doc)

    with {:ok, user} <- find_or_make_user(uri) do
      maybe_update(doc, user)
    end
  end

  def find_or_make_user(uri) do
    query = from(user in User, where: user.ap_id == ^uri)

    user = Repo.one(query)

    if is_nil(user) do
      make_user(uri)
    else
      {:ok, user}
    end
  end

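  # Builds a user from gathered WebFinger and feed data. Unless `update` is
  # true, an existing user with the same ap_id is returned as-is; otherwise the
  # gathered data is inserted or updated.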
  def make_user(uri, update \\ false) do
    with {:ok, info} <- gather_user_info(uri) do
      data = %{
        name: info["name"],
        nickname: info["nickname"] <> "@" <> info["host"],
        ap_id: info["uri"],
        info: info,
        avatar: info["avatar"],
        bio: info["bio"]
      }

      with false <- update,
           %User{} = user <- User.get_by_ap_id(data.ap_id) do
        {:ok, user}
      else
        _e -> User.insert_or_update_user(data)
      end
    end
  end

  # TODO: Just takes the first one for now.
  def make_avatar_object(author_doc, rel \\ "avatar") do
    href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
    type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

    if href do
      %{
        "type" => "Image",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
    else
      nil
    end
  end

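  # Combines the WebFinger data for a username with the data gathered from the
  # user's feed (via Websub), and records the fully qualified name under "fqn".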
  def gather_user_info(username) do
    with {:ok, webfinger_data} <- WebFinger.finger(username),
         {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
      {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
    else
      e ->
        Logger.debug(fn -> "Couldn't gather info for #{username}" end)
        {:error, e}
    end
  end

  # Regex-based 'parsing' so we don't have to pull in a full HTML parser.
  # It's a hack anyway. Maybe revisit this in the future.
  @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
  @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
  @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
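  # Finds the alternate Atom link in an HTML page. For example (illustrative
  # markup, not taken from a real page), the Mastodon pattern matches
  #
  #     <link href='https://example.com/users/alice.atom' rel='alternate' type='application/atom+xml'>
  #
  # and returns {:ok, "https://example.com/users/alice.atom"}.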
  def get_atom_url(body) do
    cond do
      Regex.match?(@mastodon_regex, body) ->
        [[_, match]] = Regex.scan(@mastodon_regex, body)
        {:ok, match}

      Regex.match?(@gs_regex, body) ->
        [[_, match]] = Regex.scan(@gs_regex, body)
        {:ok, match}

      Regex.match?(@gs_classic_regex, body) ->
        [[_, match]] = Regex.scan(@gs_classic_regex, body)
        {:ok, match}

      true ->
        Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
        {:error, "Couldn't find the Atom link"}
    end
  end

  def fetch_activity_from_atom_url(url) do
    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           @httpoison.get(
             url,
             [{:Accept, "application/atom+xml"}]
           ) do
      Logger.debug("Got document from #{url}, handling...")
      handle_incoming(body)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

  def fetch_activity_from_html_url(url) do
    Logger.debug("Trying to fetch #{url}")

    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body}} <- @httpoison.get(url, []),
         {:ok, atom_url} <- get_atom_url(body) do
      fetch_activity_from_atom_url(atom_url)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

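  # Tries the URL as an Atom document first and, if that yields no activities,
  # falls back to treating it as an HTML page and following its Atom link.
  # Any exception is rescued and turned into an {:error, message} tuple.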
  def fetch_activity_from_url(url) do
    with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do
      {:ok, activities}
    else
      _e -> fetch_activity_from_html_url(url)
    end
  rescue
    e ->
      Logger.debug("Couldn't get #{url}: #{inspect(e)}")
      {:error, "Couldn't get #{url}: #{inspect(e)}"}
  end
end