The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+## unreleased-patch - ???
+
+### Added
+- Rich media failure tracking (along with the `:failure_backoff` option)
+
+### Fixed
- Mastodon API: Search parameter `following` now correctly returns the accounts being followed rather than the followers
- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
 Reduced to just the rich media timeout.
+- Password resets no longer processed for deactivated accounts
+
## [2.1.0] - 2020-08-28
### Changed
Pleroma.Web.RichMedia.Parsers.TwitterCard,
Pleroma.Web.RichMedia.Parsers.OEmbed
],
+ failure_backoff: 60_000,
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
config :pleroma, :media_proxy,
config :pleroma, :pools,
federation: [
size: 50,
- max_waiting: 10
+ max_waiting: 10,
+ timeout: 10_000
],
media: [
size: 50,
- max_waiting: 10
+ max_waiting: 10,
+ timeout: 10_000
],
upload: [
size: 25,
- max_waiting: 5
+ max_waiting: 5,
+ timeout: 15_000
],
default: [
size: 10,
- max_waiting: 2
+ max_waiting: 2,
+ timeout: 5_000
]
config :pleroma, :hackney_pools,
suggestions: [
Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
]
+ },
+ %{
+ key: :failure_backoff,
+ type: :integer,
+ description:
+        "Amount of milliseconds after a request failure during which the request will not be retried.",
+ suggestions: [60_000]
}
]
},
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`. Defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) which will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
* `parsers`: list of Rich Media parsers.
+* `failure_backoff`: Amount of milliseconds after a request failure during which the request will not be retried. Defaults to `60_000`. See the example below.
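
For reference, a minimal sketch of a `:rich_media` configuration combining these options; the values mirror the defaults shown in the config diff above:

```elixir
config :pleroma, :rich_media,
  ignore_hosts: [],
  ignore_tld: ["local", "localdomain", "lan"],
  parsers: [
    Pleroma.Web.RichMedia.Parsers.TwitterCard,
    Pleroma.Web.RichMedia.Parsers.OEmbed
  ],
  failure_backoff: 60_000,
  ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
```
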
## HTTP server
fe_label = "#{frontend} (#{ref})"
- tmp_dir = Path.join(dest, "tmp")
+ tmp_dir = Path.join([instance_static_dir, "frontends", "tmp"])
with {_, :ok} <-
{:download_or_unzip, download_or_unzip(frontend_info, tmp_dir, options[:file])},
url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
with {:ok, %{status: 200, body: zip_body}} <-
- Pleroma.HTTP.get(url, [], timeout: 120_000, recv_timeout: 120_000) do
+ Pleroma.HTTP.get(url, [],
+ adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
+ ) do
unzip(zip_body, dest)
else
e -> {:error, e}
defp install_frontend(frontend_info, source, dest) do
from = frontend_info["build_dir"] || "dist"
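+    # Clear out the destination first, so stale files from a previous install don't survive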
+ File.rm_rf!(dest)
File.mkdir_p!(dest)
File.cp_r!(Path.join([source, from]), dest)
:ok
def repository, do: @repository
def user_agent do
- case Config.get([:http, :user_agent], :default) do
- :default ->
- info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
- named_version() <> "; " <> info
-
- custom ->
- custom
+ if Process.whereis(Pleroma.Web.Endpoint) do
+ case Config.get([:http, :user_agent], :default) do
+ :default ->
+ info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
+ named_version() <> "; " <> info
+
+ custom ->
+ custom
+ end
+ else
+      # Fallback if the endpoint is not started yet
+ "Pleroma Data Loader"
end
end
# every time the application is restarted, so we disable module
# conflicts at runtime
Code.compiler_options(ignore_module_conflict: true)
+  # Disable warnings_as_errors at runtime, as it breaks Phoenix live reload
+ # due to protocol consolidation warnings
+ Code.compiler_options(warnings_as_errors: false)
Pleroma.Telemetry.Logger.attach()
Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()
end)
{ref, state} = pop_in(state.client_monitors[client_pid])
- Process.demonitor(ref)
-
- timer =
- if used_by == [] do
- max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
- Process.send_after(self(), :idle_close, max_idle)
+  # A DOWN message can arrive right after the `remove_client` call and cause the worker to terminate
+ state =
+ if is_nil(ref) do
+ state
else
- nil
+ Process.demonitor(ref)
+
+ timer =
+ if used_by == [] do
+ max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
+ Process.send_after(self(), :idle_close, max_idle)
+ else
+ nil
+ end
+
+ %{state | timer: timer}
end
- {:reply, :ok, %{state | timer: timer}, :hibernate}
+ {:reply, :ok, state, :hibernate}
end
@impl true
{:stop, :normal, state}
end
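+  # Gun sends `:gun_up` once the connection is (re-)established; nothing to do but keep the worker alive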
+ @impl true
+ def handle_info({:gun_up, _pid, _protocol}, state) do
+ {:noreply, state, :hibernate}
+ end
+
# Gracefully shutdown if the connection got closed without any streams left
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
{:stop, :normal, state}
end
- # Otherwise, shutdown with an error
+ # Otherwise, wait for retry
@impl true
- def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams} = down_message, state) do
- {:stop, {:error, down_message}, state}
+ def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
+ {:noreply, state, :hibernate}
end
@impl true
result =
content
|> Floki.parse_fragment!()
- |> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
- |> Floki.attribute("a", "href")
+ |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+ |> Enum.take(1)
+ |> Floki.attribute("href")
|> Enum.at(0)
{:commit, {:ok, result}}
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | :inet.ip_address()
- alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
require Logger
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
- |> put_timeout()
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
- # For Hackney, this is the time a connection can stay idle in the pool.
- # For Gun, this is the timeout to receive a message from Gun.
- defp put_timeout(opts) do
- {config_key, default} =
- if adapter() == Tesla.Adapter.Gun do
- {:pools, Config.get([:pools, :default, :timeout], 5_000)}
- else
- {:hackney_pools, 10_000}
- end
-
- timeout = Config.get([config_key, opts[:pool], :timeout], default)
-
- Keyword.merge(opts, timeout: timeout)
- end
-
+ @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
def get_conn(uri, opts), do: adapter_helper().get_conn(uri, opts)
+
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do
defmodule Pleroma.HTTP.AdapterHelper.Gun do
@behaviour Pleroma.HTTP.AdapterHelper
+ alias Pleroma.Config
alias Pleroma.Gun.ConnectionPool
alias Pleroma.HTTP.AdapterHelper
connect_timeout: 5_000,
domain_lookup_timeout: 5_000,
tls_handshake_timeout: 5_000,
- retry: 0,
+ retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
]
+ @type pool() :: :federation | :upload | :media | :default
+
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do
proxy =
- Pleroma.Config.get([:http, :proxy_url])
+ [:http, :proxy_url]
+ |> Config.get()
|> AdapterHelper.format_proxy()
- config_opts = Pleroma.Config.get([:http, :adapter], [])
+ config_opts = Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> add_scheme_opts(uri)
|> AdapterHelper.maybe_add_proxy(proxy)
|> Keyword.merge(incoming_opts)
+ |> put_timeout()
end
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %{scheme: "https"}) do
- opts
- |> Keyword.put(:certificates_verification, true)
+ Keyword.put(opts, :certificates_verification, true)
+ end
+
+ defp put_timeout(opts) do
+ # this is the timeout to receive a message from Gun
+ Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
+ end
+
+ @spec pool_timeout(pool()) :: non_neg_integer()
+ def pool_timeout(pool) do
+ default = Config.get([:pools, :default, :timeout], 5_000)
+
+ Config.get([:pools, pool, :timeout], default)
end
@spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
@prefix Pleroma.Gun.ConnectionPool
def limiter_setup do
- wait = Pleroma.Config.get([:connections_pool, :connection_acquisition_wait])
- retries = Pleroma.Config.get([:connections_pool, :connection_acquisition_retries])
+ wait = Config.get([:connections_pool, :connection_acquisition_wait])
+ retries = Config.get([:connections_pool, :connection_acquisition_retries])
:pools
- |> Pleroma.Config.get([])
+ |> Config.get([])
|> Enum.each(fn {name, opts} ->
max_running = Keyword.get(opts, :size, 50)
max_waiting = Keyword.get(opts, :max_waiting, 10)
case result do
:ok -> :ok
{:error, :existing} -> :ok
- e -> raise e
end
end)
@impl true
def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
+ http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
+
case HTTP.request(method, url, body, headers, http_opts) do
{:ok, env} ->
{:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
@impl true
def get(url, headers, options) do
+ options = Keyword.put_new(options, :adapter, pool: :default)
+
with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
{:ok, {env.status, env.headers, env.body}}
end
@impl true
def head(url, headers, options) do
+ options = Keyword.put_new(options, :adapter, pool: :default)
+
with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
{:ok, {env.status, env.headers}}
end
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {:ok, %Tesla.Env{body: html}} <-
- Pleroma.HTTP.get(to_string(instance_uri), [{:Accept, "text/html"}]),
+ Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
+ adapter: [pool: :media]
+ ),
favicon_rel <-
html
|> Floki.parse_document!()
date: date
})
- [{"signature", signature}]
+ {"signature", signature}
end
defp sign_fetch(headers, id, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
- headers ++ make_signature(id, date)
+ [make_signature(id, date) | headers]
else
headers
end
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
- headers ++ [{"date", date}]
+ [{"date", date} | headers]
else
headers
end
op =
if streaming do
- upload.tempfile
- |> ExAws.S3.Upload.stream_file()
- |> ExAws.S3.upload(bucket, s3_name, [
- {:acl, :public_read},
- {:content_type, upload.content_type}
- ])
+ op =
+ upload.tempfile
+ |> ExAws.S3.Upload.stream_file()
+ |> ExAws.S3.upload(bucket, s3_name, [
+ {:acl, :public_read},
+ {:content_type, upload.content_type}
+ ])
+
+ if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
+      # Set the S3 upload timeout to respect the :upload pool timeout.
+      # The timeout should be slightly larger, so S3 can retry the upload on failure.
+ timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
+ opts = Keyword.put(op.opts, :timeout, timeout)
+ Map.put(op, :opts, opts)
+ else
+ op
+ end
else
{:ok, file_data} = File.read(upload.tempfile)
]
schema "users" do
- field(:bio, :string)
+ field(:bio, :string, default: "")
field(:raw_bio, :string)
field(:email, :string)
field(:name, :string)
# "Right to be forgotten"
# https://gdpr.eu/right-to-be-forgotten/
change(user, %{
- bio: nil,
+ bio: "",
raw_bio: nil,
email: nil,
name: nil,
end
defp base_query(_user, false), do: User
- defp base_query(user, true), do: User.get_followers_query(user)
+ defp base_query(user, true), do: User.get_friends_query(user)
defp filter_invisible_users(query) do
from(q in query, where: q.invisible == false)
name: data["name"],
follower_address: data["followers"],
following_address: data["following"],
- bio: data["summary"],
+ bio: data["summary"] || "",
actor_type: actor_type,
also_known_as: Map.get(data, "alsoKnownAs", []),
public_key: public_key,
description: "Add accounts to the given list.",
operationId: "ListController.add_to_list",
parameters: [id_param()],
- requestBody: add_remove_accounts_request(),
+ requestBody: add_remove_accounts_request(true),
security: [%{"oAuth" => ["write:lists"]}],
responses: %{
200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
tags: ["Lists"],
summary: "Remove accounts from list",
operationId: "ListController.remove_from_list",
- parameters: [id_param()],
- requestBody: add_remove_accounts_request(),
+ parameters: [
+ id_param(),
+ Operation.parameter(
+ :account_ids,
+ :query,
+ %Schema{type: :array, items: %Schema{type: :string}},
+ "Array of account IDs"
+ )
+ ],
+ requestBody: add_remove_accounts_request(false),
security: [%{"oAuth" => ["write:lists"]}],
responses: %{
200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
)
end
- defp add_remove_accounts_request do
+ defp add_remove_accounts_request(required) when is_boolean(required) do
request_body(
"Parameters",
%Schema{
properties: %{
account_ids: %Schema{type: :array, description: "Array of account IDs", items: FlakeID}
},
- required: [:account_ids]
+ required: required && [:account_ids]
},
- required: true
+ required: required
)
end
end
nickname = value([registration_attrs["nickname"], Registration.nickname(registration)])
email = value([registration_attrs["email"], Registration.email(registration)])
name = value([registration_attrs["name"], Registration.name(registration)]) || nickname
- bio = value([registration_attrs["bio"], Registration.description(registration)])
+ bio = value([registration_attrs["bio"], Registration.description(registration)]) || ""
random_password = :crypto.strong_rand_bytes(64) |> Base.encode64()
def password_reset(conn, params) do
nickname_or_email = params["email"] || params["nickname"]
- with {:ok, _} <- TwitterAPI.password_reset(nickname_or_email) do
- conn
- |> put_status(:no_content)
- |> json("")
- else
- {:error, "unknown user"} ->
- send_resp(conn, :not_found, "")
-
- {:error, _} ->
- send_resp(conn, :bad_request, "")
- end
+ TwitterAPI.password_reset(nickname_or_email)
+
+ conn
+ |> put_status(:no_content)
+ |> json("")
end
defp local_mastodon_root_path(conn) do
# DELETE /api/v1/lists/:id/accounts
def remove_from_list(
- %{assigns: %{list: list}, body_params: %{account_ids: account_ids}} = conn,
+ %{assigns: %{list: list}, params: %{account_ids: account_ids}} = conn,
_
) do
Enum.each(account_ids, fn account_id ->
json(conn, %{})
end
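+  # Also accept `account_ids` via the request body, for backwards compatibility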
+ def remove_from_list(%{body_params: params} = conn, _) do
+ remove_from_list(%{conn | params: params}, %{})
+ end
+
defp list_by_id_and_user(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
case Pleroma.List.get(id, user) do
%Pleroma.List{} = list -> assign(conn, :list, list)
followers_count: followers_count,
following_count: following_count,
statuses_count: user.note_count,
- note: user.bio || "",
+ note: user.bio,
url: user.uri || user.ap_id,
avatar: image,
avatar_static: image,
import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
+ # This is a naive way to do this, just spawning a process per activity
+  # to fetch the preview. However, it should be fine, considering
+  # pagination is restricted to 40 activities at a time.
+ defp fetch_rich_media_for_activities(activities) do
+ Enum.each(activities, fn activity ->
+ spawn(fn ->
+ Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+ end)
+ end)
+ end
+
# TODO: Add cached version.
defp get_replied_to_activities([]), do: %{}
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
activities = Enum.filter(opts.activities, & &1)
+
+ # Start fetching rich media before doing anything else, so that later calls to get the cards
+ # only block for timeout in the worst case, as opposed to
+ # length(activities_with_links) * timeout
+ fetch_rich_media_for_activities(activities)
replied_to_activities = get_replied_to_activities(activities)
parent_activities =
@impl Provider
def build_tags(%{user: user}) do
- with truncated_bio = Utils.scrub_html_and_truncate(user.bio || "") do
+ with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
[
{:meta,
[
@impl Provider
def build_tags(%{user: user}) do
- with truncated_bio = Utils.scrub_html_and_truncate(user.bio || "") do
+ with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
[
title_tag(user),
{:meta, [property: "twitter:description", content: truncated_bio], []},
@rich_media_options
end
- Pleroma.HTTP.get(url, headers, options)
+ Pleroma.HTTP.get(url, headers, adapter: options)
end
end
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
+ require Logger
+
defp parsers do
Pleroma.Config.get([:rich_media, :parsers])
end
def parse(nil), do: {:error, "No URL provided"}
if Pleroma.Config.get(:env) == :test do
+ @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
def parse(url), do: parse_url(url)
else
+ @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
def parse(url) do
- try do
- Cachex.fetch!(:rich_media_cache, url, fn _ ->
- {:commit, parse_url(url)}
- end)
- |> set_ttl_based_on_image(url)
- rescue
- e ->
- {:error, "Cachex error: #{inspect(e)}"}
+ with {:ok, data} <- get_cached_or_parse(url),
+ {:ok, _} <- set_ttl_based_on_image(data, url) do
+ {:ok, data}
+ else
+ error ->
+        Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
+        error
+ end
+ end
+
+ defp get_cached_or_parse(url) do
+ case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
+ {:ok, _data} = res ->
+ res
+
+ {:error, _} = e ->
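+        # Keep the failed result cached, but only for the backoff window, so the URL is retried once it expires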
+ ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+ Cachex.expire(:rich_media_cache, url, ttl)
+ e
end
end
end
config :pleroma, :rich_media,
ttl_setters: [MyModule]
"""
- def set_ttl_based_on_image({:ok, data}, url) do
- with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
- ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
- Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
- {:ok, data}
- else
+ @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, integer() | :noop} | {:error, :no_key}
+ def set_ttl_based_on_image(data, url) do
+ case get_ttl_from_image(data, url) do
+ {:ok, ttl} when is_number(ttl) ->
+ ttl = ttl * 1000
+
+ case Cachex.expire_at(:rich_media_cache, url, ttl) do
+ {:ok, true} -> {:ok, ttl}
+ {:ok, false} -> {:error, :no_key}
+ end
+
_ ->
- {:ok, data}
+ {:ok, :noop}
end
end
defp get_ttl_from_image(data, url) do
- Pleroma.Config.get([:rich_media, :ttl_setters])
+ [:rich_media, :ttl_setters]
+ |> Pleroma.Config.get()
|> Enum.reduce({:ok, nil}, fn
module, {:ok, _ttl} ->
module.ttl(data, url)
end
defp parse_url(url) do
- try do
- {:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)
-
+ with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
+ {:ok, html} <- Floki.parse_document(html) do
html
- |> parse_html()
|> maybe_parse()
|> Map.put("url", url)
|> clean_parsed_data()
|> check_parsed_data()
- rescue
- e ->
- {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
end
end
- defp parse_html(html), do: Floki.parse_document!(html)
-
defp maybe_parse(html) do
Enum.reduce_while(parsers(), %{}, fn parser, acc ->
case parser.parse(html, acc) do
|> parse_query_params()
|> format_query_params()
|> get_expiration_timestamp()
+ else
+ {:error, "Not aws signed url #{inspect(image)}"}
end
end
- defp is_aws_signed_url(""), do: nil
- defp is_aws_signed_url(nil), do: nil
-
- defp is_aws_signed_url(image) when is_binary(image) do
+ defp is_aws_signed_url(image) when is_binary(image) and image != "" do
%URI{host: host, query: query} = URI.parse(image)
- if String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires") do
- image
- else
- nil
- end
+ String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
end
defp is_aws_signed_url(_), do: nil
|> Map.get("X-Amz-Date")
|> Timex.parse("{ISO:Basic:Z}")
- Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
+ {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
end
end
def password_reset(nickname_or_email) do
with true <- is_binary(nickname_or_email),
- %User{local: true, email: email} = user when is_binary(email) <-
+ %User{local: true, email: email, deactivated: false} = user when is_binary(email) <-
User.get_by_nickname_or_email(nickname_or_email),
{:ok, token_record} <- Pleroma.PasswordResetToken.create_token(user) do
user
{:ok, :enqueued}
else
- false ->
- {:error, "bad user identifier"}
-
- %User{local: true, email: nil} ->
+ _ ->
{:ok, :noop}
-
- %User{local: false} ->
- {:error, "remote user"}
-
- nil ->
- {:error, "unknown user"}
end
end
def find_lrdd_template(domain) do
with {:ok, %{status: status, body: body}} when status in 200..299 <-
- HTTP.get("http://#{domain}/.well-known/host-meta", []) do
+ HTTP.get("http://#{domain}/.well-known/host-meta") do
get_template_from_xml(body)
else
_ ->
with {:ok, %{body: body, status: status}} when status in 200..299 <-
- HTTP.get("https://#{domain}/.well-known/host-meta", []) do
+ HTTP.get("https://#{domain}/.well-known/host-meta") do
get_template_from_xml(body)
else
e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
--- /dev/null
+defmodule Pleroma.Repo.Migrations.EnsureBioIsString do
+ use Ecto.Migration
+
+ def change do
+ execute("update users set bio = '' where bio is null", "")
+ end
+end
--- /dev/null
+defmodule Pleroma.Repo.Migrations.BioSetNotNull do
+ use Ecto.Migration
+
+ def change do
+ execute(
+ "alter table users alter column bio set not null",
+ "alter table users alter column bio drop not null"
+ )
+ end
+end
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/relay/relay.json")}}
end
- def get("http://localhost:4001/", _, "", Accept: "text/html") do
+ def get("http://localhost:4001/", _, "", [{"accept", "text/html"}]) do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/7369654.html")}}
end
- def get("https://osada.macgirvin.com/", _, "", Accept: "text/html") do
+ def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do
{:ok,
%Tesla.Env{
status: 200,
}
})
+ folder = Path.join([@dir, "frontends", "pleroma", "fantasy"])
+ previously_existing = Path.join([folder, "temp"])
+ File.mkdir_p!(folder)
+ File.write!(previously_existing, "yey")
+ assert File.exists?(previously_existing)
+
capture_io(fn ->
Frontend.run(["install", "pleroma", "--file", "test/fixtures/tesla_mock/frontend.zip"])
end)
- assert File.exists?(Path.join([@dir, "frontends", "pleroma", "fantasy", "test.txt"]))
+ assert File.exists?(Path.join([folder, "test.txt"]))
+ refute File.exists?(previously_existing)
end
test "it downloads and unzips unknown frontends" do
Enum.map(User.search("doe", resolve: false, for_user: u1), & &1.id) == []
end
- test "finds followers of user by partial name" do
- u1 = insert(:user)
- u2 = insert(:user, %{name: "Jimi"})
- follower_jimi = insert(:user, %{name: "Jimi Hendrix"})
- follower_lizz = insert(:user, %{name: "Lizz Wright"})
- friend = insert(:user, %{name: "Jimi"})
-
- {:ok, follower_jimi} = User.follow(follower_jimi, u1)
- {:ok, _follower_lizz} = User.follow(follower_lizz, u2)
- {:ok, u1} = User.follow(u1, friend)
-
- assert Enum.map(User.search("jimi", following: true, for_user: u1), & &1.id) == [
- follower_jimi.id
+ test "finds followings of user by partial name" do
+ lizz = insert(:user, %{name: "Lizz"})
+ jimi = insert(:user, %{name: "Jimi"})
+ following_lizz = insert(:user, %{name: "Jimi Hendrix"})
+ following_jimi = insert(:user, %{name: "Lizz Wright"})
+ follower_lizz = insert(:user, %{name: "Jimi"})
+
+ {:ok, lizz} = User.follow(lizz, following_lizz)
+ {:ok, _jimi} = User.follow(jimi, following_jimi)
+ {:ok, _follower_lizz} = User.follow(follower_lizz, lizz)
+
+ assert Enum.map(User.search("jimi", following: true, for_user: lizz), & &1.id) == [
+ following_lizz.id
]
- assert User.search("lizz", following: true, for_user: u1) == []
+ assert User.search("lizz", following: true, for_user: lizz) == []
end
test "find local and remote users for authenticated users" do
user = User.get_by_id(user.id)
assert %User{
- bio: nil,
+ bio: "",
raw_bio: nil,
email: nil,
name: nil,
assert user.note_count == 0
assert user.follower_count == 0
assert user.following_count == 0
- assert user.bio == nil
+ assert user.bio == ""
assert user.name == nil
assert called(Pleroma.Web.Federator.publish(:_))
{:ok, user: user}
end
- test "it returns 404 when user is not found", %{conn: conn, user: user} do
+ test "it returns 204 when user is not found", %{conn: conn, user: user} do
conn = post(conn, "/auth/password?email=nonexisting_#{user.email}")
- assert conn.status == 404
- assert conn.resp_body == ""
+
+ assert conn
+ |> json_response(:no_content)
end
- test "it returns 400 when user is not local", %{conn: conn, user: user} do
+ test "it returns 204 when user is not local", %{conn: conn, user: user} do
{:ok, user} = Repo.update(Ecto.Changeset.change(user, local: false))
conn = post(conn, "/auth/password?email=#{user.email}")
- assert conn.status == 400
- assert conn.resp_body == ""
+
+ assert conn
+ |> json_response(:no_content)
+ end
+
+ test "it returns 204 when user is deactivated", %{conn: conn, user: user} do
+ {:ok, user} = Repo.update(Ecto.Changeset.change(user, deactivated: true, local: true))
+ conn = post(conn, "/auth/password?email=#{user.email}")
+
+ assert conn
+ |> json_response(:no_content)
end
end
assert following == [other_user.follower_address]
end
- test "removing users from a list" do
+ test "removing users from a list, body params" do
%{user: user, conn: conn} = oauth_access(["write:lists"])
other_user = insert(:user)
third_user = insert(:user)
assert following == [third_user.follower_address]
end
+ test "removing users from a list, query params" do
+ %{user: user, conn: conn} = oauth_access(["write:lists"])
+ other_user = insert(:user)
+ third_user = insert(:user)
+ {:ok, list} = Pleroma.List.create("name", user)
+ {:ok, list} = Pleroma.List.follow(list, other_user)
+ {:ok, list} = Pleroma.List.follow(list, third_user)
+
+ assert %{} ==
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> delete("/api/v1/lists/#{list.id}/accounts?account_ids[]=#{other_user.id}")
+ |> json_response_and_validate_schema(:ok)
+
+ %Pleroma.List{following: following} = Pleroma.List.get(list.id, user)
+ assert following == [third_user.follower_address]
+ end
+
test "listing users in a list" do
%{user: user, conn: conn} = oauth_access(["read:lists"])
other_user = insert(:user)
expire_time =
Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
- assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+ assert {:ok, expire_time} == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
end
test "s3 signed url is parsed and correct ttl is set for rich media" do
Cachex.put(:rich_media_cache, url, metadata)
- Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)
+ Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image(metadata, url)
{:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)
defmodule Pleroma.Web.RichMedia.ParserTest do
use ExUnit.Case, async: true
+ alias Pleroma.Web.RichMedia.Parser
+
setup do
Tesla.Mock.mock(fn
%{
%{method: :get, url: "http://example.com/empty"} ->
%Tesla.Env{status: 200, body: "hello"}
+
+ %{method: :get, url: "http://example.com/malformed"} ->
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/malformed-data.html")}
+
+ %{method: :get, url: "http://example.com/error"} ->
+ {:error, :overload}
end)
:ok
end
test "returns error when no metadata present" do
- assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/empty")
+ assert {:error, _} = Parser.parse("http://example.com/empty")
end
test "doesn't just add a title" do
- assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/non-ogp") ==
+ assert Parser.parse("http://example.com/non-ogp") ==
{:error,
"Found metadata was invalid or incomplete: %{\"url\" => \"http://example.com/non-ogp\"}"}
end
test "parses ogp" do
- assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp") ==
+ assert Parser.parse("http://example.com/ogp") ==
{:ok,
%{
"image" => "http://ia.media-imdb.com/images/rock.jpg",
end
test "falls back to <title> when ogp:title is missing" do
- assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp-missing-title") ==
+ assert Parser.parse("http://example.com/ogp-missing-title") ==
{:ok,
%{
"image" => "http://ia.media-imdb.com/images/rock.jpg",
end
test "parses twitter card" do
- assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/twitter-card") ==
+ assert Parser.parse("http://example.com/twitter-card") ==
{:ok,
%{
"card" => "summary",
end
test "parses OEmbed" do
- assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/oembed") ==
+ assert Parser.parse("http://example.com/oembed") ==
{:ok,
%{
"author_name" => "bees",
end
test "rejects invalid OGP data" do
- assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/malformed")
+ assert {:error, _} = Parser.parse("http://example.com/malformed")
+ end
+
+ test "returns error if getting page was not successful" do
+ assert {:error, :overload} = Parser.parse("http://example.com/error")
end
end
user = User.get_by_id(user.id)
assert user.deactivated == true
assert user.name == nil
- assert user.bio == nil
+ assert user.bio == ""
assert user.password_hash == nil
end
end