# lib/pleroma/web/ostatus/ostatus.ex
defmodule Pleroma.Web.OStatus do
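  @moduledoc """
  Support for the OStatus federation stack used by this module's callers:
  Atom feeds, Salmon endpoints and the push (WebSub/PubSubHubbub) hub paths.

  Incoming Atom documents are parsed here, each entry is dispatched to a
  per-activity handler, and remote users are created or refreshed from the
  author data embedded in those documents.
  """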
  @httpoison Application.get_env(:pleroma, :httpoison)

  import Ecto.Query
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.{Repo, User, Web, Object, Activity}
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.{WebFinger, Websub}
  alias Pleroma.Web.OStatus.{FollowHandler, NoteHandler, DeleteHandler}
  alias Pleroma.Web.ActivityPub.Transmogrifier

  def feed_path(user) do
    "#{user.ap_id}/feed.atom"
  end

  def pubsub_path(user) do
    "#{Web.base_url}/push/hub/#{user.nickname}"
  end

  def salmon_path(user) do
    "#{user.ap_id}/salmon"
  end

  def remote_follow_path do
    "#{Web.base_url}/ostatus_subscribe?acct={uri}"
  end

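  # Parses an incoming OStatus/Atom document and dispatches every <entry> to the
  # matching handler, keyed on its activity:verb (delete, follow, share, favorite)
  # or, failing that, its activity:object-type (note, comment).
  # Returns {:ok, activities} with unhandled entries filtered out, or {:error, []}
  # if the document itself cannot be parsed.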
  def handle_incoming(xml_string) do
    with doc when doc != :error <- parse_document(xml_string) do
      entries = :xmerl_xpath.string('//entry', doc)

      activities =
        Enum.map(entries, fn entry ->
          {:xmlObj, :string, object_type} =
            :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

          {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
          Logger.debug("Handling #{verb}")

          try do
            case verb do
              'http://activitystrea.ms/schema/1.0/delete' ->
                with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/follow' ->
                with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

              'http://activitystrea.ms/schema/1.0/share' ->
                with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                     do: [activity, retweeted_activity]

              'http://activitystrea.ms/schema/1.0/favorite' ->
                with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                     do: [activity, favorited_activity]

              _ ->
                case object_type do
                  'http://activitystrea.ms/schema/1.0/note' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  'http://activitystrea.ms/schema/1.0/comment' ->
                    with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                  _ ->
                    Logger.error("Couldn't parse incoming document")
                    nil
                end
            end
          rescue
            e ->
              Logger.error("Error occurred while handling activity")
              Logger.error(xml_string)
              Logger.error(inspect(e))
              nil
          end
        end)
        |> Enum.filter(&(&1))

      {:ok, activities}
    else
      _e -> {:error, []}
    end
  end

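  # A "share" (announce/repost): the shared status is resolved first, then an
  # Announce activity is created for the sharing actor.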
  def make_share(entry, doc, retweeted_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.get_by_ap_id(retweeted_activity.data["object"]["id"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def handle_share(entry, doc) do
    with {:ok, retweeted_activity} <- get_or_build_object(entry),
         {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
      {:ok, activity, retweeted_activity}
    else
      e -> {:error, e}
    end
  end

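  # A "favorite": the liked status is looked up (or fetched), then a Like
  # activity is created for the favoriting actor.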
  def make_favorite(entry, doc, favorited_activity) do
    with {:ok, actor} <- find_make_or_update_user(doc),
         %Object{} = object <- Object.get_by_ap_id(favorited_activity.data["object"]["id"]),
         id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
         {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    end
  end

  def get_or_build_object(entry) do
    with {:ok, activity} <- get_or_try_fetching(entry) do
      {:ok, activity}
    else
      _e ->
        with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
          NoteHandler.handle_note(object, object)
        end
    end
  end

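  # Tries to find the referenced object's Create activity locally first;
  # if it is unknown, follows the entry's text/html link and fetches it remotely.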
  def get_or_try_fetching(entry) do
    Logger.debug("Trying to get entry from db")

    with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
         %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
      {:ok, activity}
    else
      _ ->
        Logger.debug("Couldn't get, will try to fetch")

        with href when not is_nil(href) <-
               string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
             {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
          {:ok, favorited_activity}
        else
          e -> Logger.debug("Couldn't find href: #{inspect(e)}")
        end
    end
  end

  def handle_favorite(entry, doc) do
    with {:ok, favorited_activity} <- get_or_try_fetching(entry),
         {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
      {:ok, activity, favorited_activity}
    else
      e -> {:error, e}
    end
  end

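  # Converts <link rel="enclosure"> elements into ActivityPub-style attachment maps.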
  def get_attachments(entry) do
    :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
    |> Enum.map(fn enclosure ->
      with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
           type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
        %{
          "type" => "Attachment",
          "url" => [%{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }]
        }
      end
    end)
    |> Enum.filter(&(&1))
  end

  @doc """
  Gets the content from an entry.
  """
  def get_content(entry) do
    string_from_xpath("//content", entry)
  end

  @doc """
  Gets the content warning (CW) that Mastodon puts into the entry's summary.
  """
  def get_cw(entry) do
    with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
      cw
    else
      _e -> nil
    end
  end

  def get_tags(entry) do
    :xmerl_xpath.string('//category', entry)
    |> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
    |> Enum.filter(&(&1))
    |> Enum.map(&String.downcase/1)
  end

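  # Profile updates: if the remote author advertises ActivityPub support
  # (ap_enabled), upgrade the user via the Transmogrifier; otherwise refresh
  # avatar, bio, name and banner from the OStatus author data.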
  def maybe_update(doc, user) do
    if "true" == string_from_xpath("//author[1]/ap_enabled", doc) do
      Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    else
      maybe_update_ostatus(doc, user)
    end
  end

  def maybe_update_ostatus(doc, user) do
    old_data = %{
      avatar: user.avatar,
      bio: user.bio,
      name: user.name,
      info: user.info
    }

    with false <- user.local,
         avatar <- make_avatar_object(doc),
         bio <- string_from_xpath("//author[1]/summary", doc),
         name <- string_from_xpath("//author[1]/poco:displayName", doc),
         info <-
           Map.put(user.info, "banner", make_avatar_object(doc, "header") || user.info["banner"]),
         new_data <- %{
           avatar: avatar || old_data.avatar,
           name: name || old_data.name,
           bio: bio || old_data.bio,
           info: info || old_data.info
         },
         false <- new_data == old_data do
      change = Ecto.Changeset.change(user, new_data)
      Repo.update(change)
    else
      _ ->
        {:ok, user}
    end
  end

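  # Resolves the author of a document to a local User record, creating the
  # account from remote data when it doesn't exist yet and refreshing it when it does.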
  def find_make_or_update_user(doc) do
    uri = string_from_xpath("//author/uri[1]", doc)

    with {:ok, user} <- find_or_make_user(uri) do
      maybe_update(doc, user)
    end
  end

  def find_or_make_user(uri) do
    query =
      from(user in User, where: user.ap_id == ^uri)

    user = Repo.one(query)

    if is_nil(user) do
      make_user(uri)
    else
      {:ok, user}
    end
  end

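  # Builds (or updates) a User from remote profile data gathered via WebFinger
  # and the author's feed; with update: false, an existing user is returned as-is.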
  def make_user(uri, update \\ false) do
    with {:ok, info} <- gather_user_info(uri) do
      data = %{
        name: info["name"],
        nickname: info["nickname"] <> "@" <> info["host"],
        ap_id: info["uri"],
        info: info,
        avatar: info["avatar"],
        bio: info["bio"]
      }

      with false <- update,
           %User{} = user <- User.get_by_ap_id(data.ap_id) do
        {:ok, user}
      else
        _e -> User.insert_or_update_user(data)
      end
    end
  end

  # TODO: Just takes the first one for now.
  def make_avatar_object(author_doc, rel \\ "avatar") do
    href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
    type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

    if href do
      %{
        "type" => "Image",
        "url" => [%{
          "type" => "Link",
          "mediaType" => type,
          "href" => href
        }]
      }
    else
      nil
    end
  end

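  # Combines WebFinger data with data gathered from the user's feed into one map,
  # additionally keyed by the fully qualified name ("fqn").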
  def gather_user_info(username) do
    with {:ok, webfinger_data} <- WebFinger.finger(username),
         {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
      {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
    else
      e ->
        Logger.debug(fn -> "Couldn't gather info for #{username}" end)
        {:error, e}
    end
  end

  # Regex-based 'parsing' so we don't have to pull in a full html parser.
  # It's a hack anyway. Maybe revisit this in the future.
  @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
  @gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
  @gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
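  # For illustration, @mastodon_regex above matches a discovery tag of roughly this
  # shape (the URL is a made-up example):
  #   <link href='https://example.com/users/alice.atom' rel='alternate' type='application/atom+xml'>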
  def get_atom_url(body) do
    cond do
      Regex.match?(@mastodon_regex, body) ->
        [[_, match]] = Regex.scan(@mastodon_regex, body)
        {:ok, match}

      Regex.match?(@gs_regex, body) ->
        [[_, match]] = Regex.scan(@gs_regex, body)
        {:ok, match}

      Regex.match?(@gs_classic_regex, body) ->
        [[_, match]] = Regex.scan(@gs_classic_regex, body)
        {:ok, match}

      true ->
        Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
        {:error, "Couldn't find the Atom link"}
    end
  end

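  # Fetches an Atom document over HTTP and runs it through handle_incoming/1.
  # Only 2xx responses are handled; anything else is logged and returned as-is.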
  def fetch_activity_from_atom_url(url) do
    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body, status_code: code}} when code in 200..299 <-
           @httpoison.get(
             url,
             [Accept: "application/atom+xml"],
             follow_redirect: true,
             timeout: 10000,
             recv_timeout: 20000
           ) do
      Logger.debug("Got document from #{url}, handling...")
      handle_incoming(body)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

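  # Fetches an HTML page, extracts its Atom <link> via get_atom_url/1 and then
  # fetches the activity from that Atom URL.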
  def fetch_activity_from_html_url(url) do
    Logger.debug("Trying to fetch #{url}")

    with true <- String.starts_with?(url, "http"),
         {:ok, %{body: body}} <-
           @httpoison.get(url, [], follow_redirect: true, timeout: 10000, recv_timeout: 20000),
         {:ok, atom_url} <- get_atom_url(body) do
      fetch_activity_from_atom_url(atom_url)
    else
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        e
    end
  end

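  # Tries the URL as an Atom document first and falls back to treating it as an
  # HTML page; any exception is caught and returned as an {:error, reason} tuple.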
  def fetch_activity_from_url(url) do
    try do
      with {:ok, activities} when length(activities) > 0 <- fetch_activity_from_atom_url(url) do
        {:ok, activities}
      else
        _e ->
          with {:ok, activities} <- fetch_activity_from_html_url(url) do
            {:ok, activities}
          end
      end
    rescue
      e ->
        Logger.debug("Couldn't get #{url}: #{inspect(e)}")
        {:error, "Couldn't get #{url}: #{inspect(e)}"}
    end
  end
end