remove pool and timeout options that duplicate the defaults
[akkoma] lib/pleroma/web/ostatus/ostatus.ex
defmodule Pleroma.Web.OStatus do
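  @moduledoc """
  Handles the OStatus protocol: parsing incoming Atom/ActivityStreams documents
  into activities, building and refreshing remote users from WebFinger and feed
  data, and fetching remote activities from Atom or HTML URLs.
  """
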
  @httpoison Application.get_env(:pleroma, :httpoison)

  import Ecto.Query
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.{Repo, User, Web, Object, Activity}
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.{WebFinger, Websub}
  alias Pleroma.Web.OStatus.{FollowHandler, UnfollowHandler, NoteHandler, DeleteHandler}
  alias Pleroma.Web.ActivityPub.Transmogrifier

  def is_representable?(%Activity{data: data}) do
    object = Object.normalize(data["object"])

    cond do
      is_nil(object) ->
        false

      object.data["type"] == "Note" ->
        true

      true ->
        false
    end
  end

  def feed_path(user) do
    "#{user.ap_id}/feed.atom"
  end

  def pubsub_path(user) do
    "#{Web.base_url()}/push/hub/#{user.nickname}"
  end

  def salmon_path(user) do
    "#{user.ap_id}/salmon"
  end

  def remote_follow_path do
    "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
  end

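  @doc """
  Parses an incoming Atom document and handles every `<entry>` in it,
  dispatching on the ActivityStreams verb (delete, follow, unfollow, share,
  favorite) and falling back to the object type (note, comment) otherwise.
  """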
  def handle_incoming(xml_string) do
    with doc when doc != :error <- parse_document(xml_string) do
      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/unfollow' ->
                with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occurred while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(& &1)

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end

  def make_share(entry, doc, retweeted_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(retweeted_activity.data["object"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def handle_share(entry, doc) do
    with {:ok, retweeted_activity} <- get_or_build_object(entry),
         {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
      {:ok, activity, retweeted_activity}
    else
      e -> {:error, e}
    end
  end

  def make_favorite(entry, doc, favorited_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.normalize(favorited_activity.data["object"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def get_or_build_object(entry) do
    with {:ok, activity} <- get_or_try_fetching(entry) do
      {:ok, activity}
    else
      _e ->
        with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
          NoteHandler.handle_note(object, object)
        end
    end
  end

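  # Looks the referenced object up locally by its id; if it isn't known yet,
  # tries to fetch the activity from the entry's text/html link.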
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e -> Logger.debug("Couldn't find href: #{inspect(e)}")
        end
    end
  end

  def handle_favorite(entry, doc) do
    with {:ok, favorited_activity} <- get_or_try_fetching(entry),
         {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
      {:ok, activity, favorited_activity}
    else
      e -> {:error, e}
    end
  end

  def get_attachments(entry) do
    :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
    |> Enum.map(fn enclosure ->
      with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
           type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
        %{
          "type" => "Attachment",
          "url" => [
            %{
              "type" => "Link",
              "mediaType" => type,
              "href" => href
            }
          ]
        }
      end
    end)
    |> Enum.filter(& &1)
  end

  @doc """
  Gets the content from an entry.
  """
  def get_content(entry) do
    string_from_xpath("//content", entry)
  end

  @doc """
  Get the CW that Mastodon uses.
  """
  def get_cw(entry) do
    with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
      cw
    else
      _e -> nil
    end
  end

  def get_tags(entry) do
    :xmerl_xpath.string('//category', entry)
    |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
    |> Enum.filter(& &1)
    |> Enum.map(&String.downcase/1)
  end

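  # Remote authors that advertise `ap_enabled` are upgraded to ActivityPub;
  # everyone else gets their OStatus profile data refreshed.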
  def maybe_update(doc, user) do
    if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
      Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    else
      maybe_update_ostatus(doc, user)
    end
  end

  def maybe_update_ostatus(doc, user) do
    old_data = %{
      avatar: user.avatar,
      bio: user.bio,
      name: user.name
    }

    with false <- user.local,
         avatar <- make_avatar_object(doc),
         bio <- string_from_xpath("//author[1]/summary", doc),
         name <- string_from_xpath("//author[1]/poco:displayName", doc),
         new_data <- %{
           avatar: avatar || old_data.avatar,
           name: name || old_data.name,
           bio: bio || old_data.bio
         },
         false <- new_data == old_data do
      change = Ecto.Changeset.change(user, new_data)
      User.update_and_set_cache(change)
    else
      _ ->
        {:ok, user}
    end
  end

  def find_make_or_update_user(doc) do
    uri = string_from_xpath("//author/uri[1]", doc)

    with {:ok, user} <- find_or_make_user(uri) do
      maybe_update(doc, user)
    end
  end

  def find_or_make_user(uri) do
    query = from(user in User, where: user.ap_id == ^uri)

    user = Repo.one(query)

    if is_nil(user) do
      make_user(uri)
    else
      {:ok, user}
    end
  end

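  # Builds a user from gathered remote info. Unless `update` is set, an
  # already-known user is returned as-is instead of being overwritten.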
  def make_user(uri, update \\ false) do
    with {:ok, info} <- gather_user_info(uri) do
      data = %{
        name: info["name"],
        nickname: info["nickname"] <> "@" <> info["host"],
        ap_id: info["uri"],
        info: info,
        avatar: info["avatar"],
        bio: info["bio"]
      }

      with false <- update,
           %User{} = user <- User.get_by_ap_id(data.ap_id) do
        {:ok, user}
      else
        _e -> User.insert_or_update_user(data)
      end
    end
  end

  # TODO: Just takes the first one for now.
  def make_avatar_object(author_doc, rel \\ "avatar") do
    href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
    type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

    if href do
      %{
        "type" => "Image",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
    else
      nil
    end
  end

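  # Combines the WebFinger response with data gathered from the user's feed
  # and records the fully qualified name under "fqn".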
  def gather_user_info(username) do
    with {:ok, webfinger_data} <- WebFinger.finger(username),
         {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
      {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
    else
      e ->
        Logger.debug(fn -> "Couldn't gather info for #{username}" end)
        {:error, e}
    end
  end

  # Regex-based 'parsing' so we don't have to pull in a full html parser
  # It's a hack anyway. Maybe revisit this in the future
  @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
  @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
  @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
  def get_atom_url(body) do
    cond do
      Regex.match?(@mastodon_regex, body) ->
        [[_, match]] = Regex.scan(@mastodon_regex, body)
        {:ok, match}

      Regex.match?(@gs_regex, body) ->
        [[_, match]] = Regex.scan(@gs_regex, body)
        {:ok, match}

      Regex.match?(@gs_classic_regex, body) ->
        [[_, match]] = Regex.scan(@gs_classic_regex, body)
        {:ok, match}

      true ->
        Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
        {:error, "Couldn't find the Atom link"}
    end
  end

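  # Fetches an Atom document over HTTP and feeds it into handle_incoming/1.
  # Only 2xx responses are accepted.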
  def fetch_activity_from_atom_url(url) do
    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           @httpoison.get(
             url,
             [Accept: "application/atom+xml"],
             follow_redirect: true
           ) do
      Logger.debug("Got document from #{url}, handling...")
      handle_incoming(body)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

  def fetch_activity_from_html_url(url) do
    Logger.debug("Trying to fetch #{url}")

    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body}} <- @httpoison.get(url, [], follow_redirect: true),
         {:ok, atom_url} <- get_atom_url(body) do
      fetch_activity_from_atom_url(atom_url)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

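  # Tries the URL as an Atom feed first; if that yields nothing, falls back to
  # fetching the HTML page and following its alternate Atom link.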
  def fetch_activity_from_url(url) do
    try do
      with {:ok, activities} when length(activities) > 0 <- fetch_activity_from_atom_url(url) do
        {:ok, activities}
      else
        _e ->
          with {:ok, activities} <- fetch_activity_from_html_url(url) do
            {:ok, activities}
          end
      end
    rescue
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        {:error, "Couldn't get #{url}: #{inspect(e)}"}
    end
  end
end