X-Git-Url: http://git.squeep.com/?a=blobdiff_plain;f=lib%2Fpleroma%2Fweb%2Fostatus%2Fostatus.ex;h=5a44b86610301dcea139e5c30f6f5994adc1105d;hb=00a7183118d90946bf243d0f5488db65f8a6f16f;hp=cd471f860de95b60f3d59eee750a1d71584ea3ba;hpb=84027ff00b7fc63934f12129f84b5c7ee1d39248;p=akkoma
diff --git a/lib/pleroma/web/ostatus/ostatus.ex b/lib/pleroma/web/ostatus/ostatus.ex
index cd471f860..5a44b8661 100644
--- a/lib/pleroma/web/ostatus/ostatus.ex
+++ b/lib/pleroma/web/ostatus/ostatus.ex
@@ -1,9 +1,11 @@
defmodule Pleroma.Web.OStatus do
+ @httpoison Application.get_env(:pleroma, :httpoison)
+
import Ecto.Query
import Pleroma.Web.XML
require Logger
- alias Pleroma.{Repo, User, Web}
+ alias Pleroma.{Repo, User, Web, Object, Activity}
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.{WebFinger, Websub}
@@ -24,68 +26,163 @@ defmodule Pleroma.Web.OStatus do
entries = :xmerl_xpath.string('//entry', doc)
activities = Enum.map(entries, fn (entry) ->
- {:xmlObj, :string, object_type } = :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)
-
- case object_type do
- 'http://activitystrea.ms/schema/1.0/note' ->
- {:ok, activity} = handle_note(entry, doc)
- activity
- 'http://activitystrea.ms/schema/1.0/comment' ->
- {:ok, activity} = handle_note(entry, doc)
- activity
+ {:xmlObj, :string, object_type} = :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)
+ {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
+
+ case verb do
+ 'http://activitystrea.ms/schema/1.0/share' ->
+ with {:ok, activity, retweeted_activity} <- handle_share(entry, doc), do: [activity, retweeted_activity]
+ 'http://activitystrea.ms/schema/1.0/favorite' ->
+ with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc), do: [activity, favorited_activity]
_ ->
- Logger.error("Couldn't parse incoming document")
- nil
+ case object_type do
+ 'http://activitystrea.ms/schema/1.0/note' ->
+ with {:ok, activity} <- handle_note(entry, doc), do: activity
+ 'http://activitystrea.ms/schema/1.0/comment' ->
+ with {:ok, activity} <- handle_note(entry, doc), do: activity
+ _ ->
+ Logger.error("Couldn't parse incoming document")
+ nil
+ end
end
end)
{:ok, activities}
end
- # TODO
- # wire up replies
- def handle_note(entry, doc \\ nil) do
- content_html = string_from_xpath("/entry/content[1]", entry)
+ def make_share(_entry, doc, retweeted_activity) do
+ with {:ok, actor} <- find_make_or_update_user(doc),
+ %Object{} = object <- Object.get_cached_by_ap_id(retweeted_activity.data["object"]["id"]),
+ {:ok, activity, _object} = ActivityPub.announce(actor, object, false) do
+ {:ok, activity}
+ end
+ end
- uri = string_from_xpath("/entry/author/uri[1]", entry) || string_from_xpath("/feed/author/uri[1]", doc)
- {:ok, actor} = find_or_make_user(uri)
+ def handle_share(entry, doc) do
+ with [object] <- :xmerl_xpath.string('/entry/activity:object', entry),
+ {:ok, retweeted_activity} <- handle_note(object, object),
+ {:ok, activity} <- make_share(entry, doc, retweeted_activity) do
+ {:ok, activity, retweeted_activity}
+ else
+ e -> {:error, e}
+ end
+ end
- context = string_from_xpath("/entry/ostatus:conversation[1]", entry) |> String.trim
- context = if String.length(context) > 0 do
- context
+ def make_favorite(_entry, doc, favorited_activity) do
+ with {:ok, actor} <- find_make_or_update_user(doc),
+ %Object{} = object <- Object.get_cached_by_ap_id(favorited_activity.data["object"]["id"]),
+ {:ok, activity, _object} = ActivityPub.like(actor, object, false) do
+ {:ok, activity}
+ end
+ end
+
+ def get_or_try_fetching(entry) do
+ with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
+ %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
+ {:ok, activity}
+ else _e ->
+ with href when not is_nil(href) <- string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
+ {:ok, [favorited_activity]} <- fetch_activity_from_html_url(href) do
+ {:ok, favorited_activity}
+ end
+ end
+ end
+
+ def handle_favorite(entry, doc) do
+ with {:ok, favorited_activity} <- get_or_try_fetching(entry),
+ {:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
+ {:ok, activity, favorited_activity}
else
- ActivityPub.generate_context_id
+ e -> {:error, e}
end
+ end
+
+ def get_attachments(entry) do
+ :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
+ |> Enum.map(fn (enclosure) ->
+ with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
+ type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
+ %{
+ "type" => "Attachment",
+ "url" => [%{
+ "type" => "Link",
+ "mediaType" => type,
+ "href" => href
+ }]
+ }
+ end
+ end)
+ |> Enum.filter(&(&1))
+ end
+
+ def handle_note(entry, doc \\ nil) do
+ content_html = string_from_xpath("//content[1]", entry)
+
+ [author] = :xmerl_xpath.string('//author[1]', doc)
+ {:ok, actor} = find_make_or_update_user(author)
+ inReplyTo = string_from_xpath("//thr:in-reply-to[1]/@ref", entry)
+
+ context = (string_from_xpath("//ostatus:conversation[1]", entry) || "") |> String.trim
+
+ attachments = get_attachments(entry)
+
+ context = with %{data: %{"context" => context}} <- Object.get_cached_by_ap_id(inReplyTo) do
+ context
+ else _e ->
+ if String.length(context) > 0 do
+ context
+ else
+ ActivityPub.generate_context_id
+ end
+ end
to = [
"https://www.w3.org/ns/activitystreams#Public"
]
- mentions = :xmerl_xpath.string('/entry/link[@rel="mentioned" and @ostatus:object-type="http://activitystrea.ms/schema/1.0/person"]', entry)
+ mentions = :xmerl_xpath.string('//link[@rel="mentioned" and @ostatus:object-type="http://activitystrea.ms/schema/1.0/person"]', entry)
|> Enum.map(fn(person) -> string_from_xpath("@href", person) end)
to = to ++ mentions
- date = string_from_xpath("/entry/published", entry)
- id = string_from_xpath("/entry/id", entry)
+ date = string_from_xpath("//published", entry)
+ id = string_from_xpath("//id", entry)
object = %{
+ "id" => id,
"type" => "Note",
"to" => to,
"content" => content_html,
"published" => date,
"context" => context,
- "actor" => actor.ap_id
+ "actor" => actor.ap_id,
+ "attachment" => attachments
}
- inReplyTo = string_from_xpath("/entry/thr:in-reply-to[1]/@ref", entry)
-
object = if inReplyTo do
Map.put(object, "inReplyTo", inReplyTo)
else
object
end
- ActivityPub.create(to, actor, context, object, %{"id" => id}, date)
+ # TODO: Bail out sooner and use transaction.
+ if Object.get_by_ap_id(id) do
+ {:error, "duplicate activity"}
+ else
+ ActivityPub.create(to, actor, context, object, %{}, date, false)
+ end
+ end
+
+ def find_make_or_update_user(doc) do
+ uri = string_from_xpath("//author/uri[1]", doc)
+ with {:ok, user} <- find_or_make_user(uri) do
+ avatar = make_avatar_object(doc)
+ if user.avatar != avatar do
+ change = Ecto.Changeset.change(user, %{avatar: avatar})
+ Repo.update(change)
+ else
+ {:ok, user}
+ end
+ end
end
def find_or_make_user(uri) do
@@ -105,10 +202,11 @@ defmodule Pleroma.Web.OStatus do
with {:ok, info} <- gather_user_info(uri) do
data = %{
local: false,
- name: info.name,
- nickname: info.nickname <> "@" <> info.host,
- ap_id: info.uri,
- info: info
+ name: info["name"],
+ nickname: info["nickname"] <> "@" <> info["host"],
+ ap_id: info["uri"],
+ info: info,
+ avatar: info["avatar"]
}
# TODO: Make remote user changeset
# SHould enforce fqn nickname
@@ -117,9 +215,9 @@ defmodule Pleroma.Web.OStatus do
end
# TODO: Just takes the first one for now.
- defp make_avatar_object(author_doc) do
- href = string_from_xpath("/author[1]/link[@rel=\"avatar\"]/@href", author_doc)
- type = string_from_xpath("/author[1]/link[@rel=\"avatar\"]/@type", author_doc)
+ def make_avatar_object(author_doc) do
+ href = string_from_xpath("//author[1]/link[@rel=\"avatar\"]/@href", author_doc)
+ type = string_from_xpath("//author[1]/link[@rel=\"avatar\"]/@type", author_doc)
if href do
%{
@@ -138,11 +236,37 @@ defmodule Pleroma.Web.OStatus do
def gather_user_info(username) do
with {:ok, webfinger_data} <- WebFinger.finger(username),
- {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data.topic) do
- {:ok, Map.merge(webfinger_data, feed_data) |> Map.put(:fqn, username)}
+ {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
+ {:ok, Map.merge(webfinger_data, feed_data) |> Map.put("fqn", username)}
else e ->
- Logger.debug("Couldn't gather info for #{username}")
+ Logger.debug(fn -> "Couldn't gather info for #{username}" end)
{:error, e}
end
end
+
+ # Regex-based 'parsing' so we don't have to pull in a full html parser
+ # It's a hack anyway. Maybe revisit this in the future
+ @mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
+ @gs_regex ~r/<link title=.* href="(.*)" rel="alternate" type="application\/atom\+xml">/
+ def get_atom_url(body) do
+ cond do
+ Regex.match?(@mastodon_regex, body) ->
+ [[_, match]] = Regex.scan(@mastodon_regex, body)
+ {:ok, match}
+ Regex.match?(@gs_regex, body) ->
+ [[_, match]] = Regex.scan(@gs_regex, body)
+ {:ok, match}
+ true ->
+ Logger.debug(fn -> "Couldn't find atom link in #{inspect(body)}" end)
+ {:error, "Couldn't find the atom link"}
+ end
+ end
+
+ def fetch_activity_from_html_url(url) do
+ with {:ok, %{body: body}} <- @httpoison.get(url),
+ {:ok, atom_url} <- get_atom_url(body),
+ {:ok, %{status_code: code, body: body}} when code in 200..299 <- @httpoison.get(atom_url) do
+ handle_incoming(body)
+ end
+ end
end