- Config: HTTP timeout options, :pool\_timeout and :receive\_timeout
- Added statistic gathering about instances which do/don't have signed fetches when they request from us
- Ability to set a default post expiry time, after which the post will be deleted. If used in concert with ActivityExpiration MRF, the expiry which comes _sooner_ will be applied.
+- Regular task to prune local transient activities
+- Task to manually run the transient prune job (pleroma.database prune\_task)
## Changed
- MastoAPI: Accept BooleanLike input on `/api/v1/accounts/:id/follow` (fixes follows with mastodon.py)
- Relays from akkoma are now off by default
- NormalizeMarkup MRF is now on by default
- Follow/Block/Mute imports now spin off into *n* tasks to avoid the oban timeout
+- Transient activities received from remote servers are no longer persisted in the database
+
+## Upgrade Notes
+- If you have an old instance, you will probably want to run `mix pleroma.database prune_task` in the foreground to catch it up with the history of your instance.
## 2022.11
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10,
- nodeinfo_fetcher: 1
+ nodeinfo_fetcher: 1,
+ database_prune: 1
],
plugins: [
Oban.Plugins.Pruner,
],
crontab: [
{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
- {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}
+ {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker},
+ {"0 3 * * *", Pleroma.Workers.Cron.PruneDatabaseWorker}
]
config :pleroma, :workers,
new_users_digest: :timer.seconds(10),
mute_expire: :timer.seconds(5),
search_indexing: :timer.seconds(5),
- nodeinfo_fetcher: :timer.seconds(10)
+ nodeinfo_fetcher: :timer.seconds(10),
+ database_prune: :timer.minutes(10)
]
config :pleroma, Pleroma.Formatter,
```
See [PostgreSQL documentation](https://www.postgresql.org/docs/current/textsearch-configuration.html) and `docs/configuration/howto_search_cjk.md` for more detail.
+
+## Pruning old activities
+
+Over time, transient `Delete` activities and `Tombstone` objects
+can accumulate in your database, inflating its size. This is not ideal.
+There is a periodic task that prunes these transient objects automatically;
+note that on an older instance its first run may take a while
+to catch up to the current day.
+
+=== "OTP"
+
+ ```sh
+ ./bin/pleroma_ctl database prune_task
+ ```
+
+=== "From Source"
+
+ ```sh
+ mix pleroma.database prune_task
+ ```
\ No newline at end of file
end
end
+ # Runs the transient-data prune job once, in the foreground, so admins can
+ # catch an old instance up without waiting for the nightly cron run.
+ # The worker ignores its job argument, so `nil` stands in for the
+ # %Oban.Job{}; the result is printed for operator feedback.
+ def run(["prune_task"]) do
+ start_pleroma()
+
+ nil
+ |> Pleroma.Workers.Cron.PruneDatabaseWorker.perform()
+ |> IO.inspect()
+ end
+
def run(["fix_likes_collections"]) do
start_pleroma()
--- /dev/null
+defmodule Pleroma.Activity.Pruner do
+ @moduledoc """
+ Prunes transient activities from the database.
+
+ Only activities older than `@cutoff` days are removed; each public
+ function deletes every activity of one ActivityPub type in a single query.
+ """
+ # Age (in days) past which transient activities are considered prunable.
+ @cutoff 30
+
+ alias Pleroma.Activity
+ alias Pleroma.Repo
+ import Ecto.Query
+
+ @doc "Deletes all `Delete` activities older than the cutoff."
+ def prune_deletes, do: prune_by_type("Delete")
+
+ @doc "Deletes all `Undo` activities older than the cutoff."
+ def prune_undos, do: prune_by_type("Undo")
+
+ @doc "Deletes all `Remove` activities older than the cutoff."
+ def prune_removes, do: prune_by_type("Remove")
+
+ # Shared implementation for the three public functions above, which were
+ # previously three identical copies differing only in the type string.
+ # `timeout: :infinity` because the first run on an old instance may
+ # delete a very large number of rows.
+ defp prune_by_type(type) do
+ before_time = cutoff()
+
+ from(a in Activity,
+ where: fragment("?->>'type' = ?", a.data, ^type) and a.inserted_at < ^before_time
+ )
+ |> Repo.delete_all(timeout: :infinity)
+ end
+
+ defp cutoff do
+ DateTime.utc_now() |> Timex.shift(days: -@cutoff)
+ end
+end
--- /dev/null
+defmodule Pleroma.Object.Pruner do
+ @moduledoc """
+ Prunes old tombstone objects (and their delivery records) from the database.
+ """
+ # Age (in days) past which tombstones are considered prunable.
+ @cutoff 30
+
+ alias Pleroma.Object
+ alias Pleroma.Delivery
+ alias Pleroma.Repo
+ import Ecto.Query
+
+ @doc """
+ Deletes every delivery row that points at a tombstoned object, so the
+ objects themselves can be removed afterwards without dangling references.
+ """
+ def prune_tombstoned_deliveries do
+ from(d in Delivery)
+ |> join(:inner, [d], o in Object, on: d.object_id == o.id)
+ |> where([d, o], fragment("?->>'type' = ?", o.data, "Tombstone"))
+ |> Repo.delete_all(timeout: :infinity)
+ end
+
+ @doc "Deletes all `Tombstone` objects older than the cutoff."
+ def prune_tombstones do
+ before_time = cutoff()
+
+ # `on_delete: :delete_all` was removed here: it is a schema/migration
+ # association option, not a valid `Repo.delete_all/2` option. Cascading
+ # removal of dependent rows is enforced by DB-level foreign keys instead.
+ from(o in Object,
+ where: fragment("?->>'type' = ?", o.data, "Tombstone") and o.inserted_at < ^before_time
+ )
+ |> Repo.delete_all(timeout: :infinity)
+ end
+
+ defp cutoff do
+ DateTime.utc_now() |> Timex.shift(days: -@cutoff)
+ end
+end
end
end
+ @unpersisted_activity_types ~w[Undo Delete Remove]
+ @impl true
+ # Transient activities received from remote servers are not written to the
+ # database: we build an unpersisted %Activity{} (no id / timestamps) so the
+ # rest of the pipeline can still see the data, actor and recipients.
+ # The stray `{:ok, object, meta}` expression that used to sit at the top of
+ # this clause was dead code (non-terminal, no effect) and has been removed.
+ def persist(%{"type" => type} = object, [local: false] = meta)
+ when type in @unpersisted_activity_types do
+ {recipients, _, _} = get_recipients(object)
+
+ unpersisted = %Activity{
+ data: object,
+ local: false,
+ recipients: recipients,
+ actor: object["actor"]
+ }
+
+ {:ok, unpersisted, meta}
+ end
+
@impl true
def persist(object, meta) do
with local <- Keyword.fetch!(meta, :local),
# Tasks this handles:
# - Delete and unpins the create activity
- # - Replace object with Tombstone
# - Set up notification
# - Reduce the user note count
# - Reduce the reply count
|> Maps.put_if_present(:language, Pleroma.Web.Gettext.normalize_locale(params[:language]))
|> Maps.put_if_present(:status_ttl_days, params[:status_ttl_days], status_ttl_days_value)
- IO.inspect(user_params)
# What happens here:
#
# We want to update the user through the pipeline, but the ActivityPub
--- /dev/null
+defmodule Pleroma.Workers.Cron.PruneDatabaseWorker do
+ @moduledoc """
+ The worker to prune old data from the database.
+ """
+ require Logger
+ use Oban.Worker, queue: "database_prune"
+
+ alias Pleroma.Activity.Pruner, as: ActivityPruner
+ alias Pleroma.Object.Pruner, as: ObjectPruner
+
+ @impl Oban.Worker
+ def perform(_job) do
+ Logger.info("Pruning old data from the database")
+
+ # Each pruning step, paired with its log line, in execution order.
+ # Tombstoned deliveries are removed before the tombstones themselves.
+ [
+ {"Pruning old deletes", &ActivityPruner.prune_deletes/0},
+ {"Pruning old undos", &ActivityPruner.prune_undos/0},
+ {"Pruning old removes", &ActivityPruner.prune_removes/0},
+ {"Pruning old tombstone delivery entries", &ObjectPruner.prune_tombstoned_deliveries/0},
+ {"Pruning old tombstones", &ObjectPruner.prune_tombstones/0}
+ ]
+ |> Enum.each(fn {message, prune} ->
+ Logger.info(message)
+ prune.()
+ end)
+
+ :ok
+ end
+end
end
def perform(%Job{args: %{"op" => "remove_from_index", "object" => object_id}}) do
- object = Pleroma.Object.get_by_id(object_id)
-
search_module = Pleroma.Config.get([Pleroma.Search, :module])
- search_module.remove_from_index(object)
+ # Fake the object so we can remove it from the index without having to keep it in the DB
+ search_module.remove_from_index(%Pleroma.Object{id: object_id})
:ok
end
--- /dev/null
+defmodule Pleroma.Repo.Migrations.AddNotificationActivityIdIndex do
+ use Ecto.Migration
+
+ # Speeds up lookups/deletes of notifications by their activity_id, which
+ # the database pruner relies on when removing old activities.
+ # NOTE(review): a plain CREATE INDEX locks writes on large tables;
+ # consider `concurrently: true` with @disable_ddl_transaction true —
+ # confirm acceptable for big instances first.
+ def change do
+ create(index(:notifications, [:activity_id]))
+ end
+end
--- /dev/null
+defmodule Pleroma.Repo.Migrations.AddBookmarksActivityIdIndex do
+ use Ecto.Migration
+
+ # Speeds up lookups/deletes of bookmarks by their activity_id, which
+ # the database pruner relies on when removing old activities.
+ # NOTE(review): consider `concurrently: true` (with
+ # @disable_ddl_transaction true) for large instances.
+ def change do
+ create(index(:bookmarks, [:activity_id]))
+ end
+end
--- /dev/null
+defmodule Pleroma.Repo.Migrations.AddReportNotesActivityIdIndex do
+ use Ecto.Migration
+
+ # Speeds up lookups/deletes of report notes by their activity_id, which
+ # the database pruner relies on when removing old activities.
+ # NOTE(review): consider `concurrently: true` (with
+ # @disable_ddl_transaction true) for large instances.
+ def change do
+ create(index(:report_notes, [:activity_id]))
+ end
+end
--- /dev/null
+defmodule Pleroma.Repo.Migrations.AddCascadeToReportNotesOnActivityDelete do
+ use Ecto.Migration
+
+ # Recreates the report_notes -> activities FK with ON DELETE CASCADE so
+ # that pruning an activity also removes its report notes at the DB level
+ # (the pruner uses Repo.delete_all, which bypasses Ecto callbacks).
+ def up do
+ drop(constraint(:report_notes, "report_notes_activity_id_fkey"))
+
+ alter table(:report_notes) do
+ modify(:activity_id, references(:activities, type: :uuid, on_delete: :delete_all))
+ end
+ end
+
+ # Restores the original constraint (default on_delete behavior).
+ def down do
+ drop(constraint(:report_notes, "report_notes_activity_id_fkey"))
+
+ alter table(:report_notes) do
+ modify(:activity_id, references(:activities, type: :uuid))
+ end
+ end
+end
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 794.02 449.34">
+ <defs>
+ <style>
+ .cls-1 {
+ fill: #fff;
+ }
+
+ .cls-2 {
+ fill: #2d2053;
+ }
+
+ .cls-3 {
+ fill: #462d7a;
+ }
+
+ .cls-4 {
+ stroke: #2c1e50;
+ }
+
+ .cls-4, .cls-5 {
+ stroke-miterlimit: 10;
+ }
+
+ .cls-5 {
+ stroke: #fff;
+ }
+
+ .cls-6 {
+ fill: #181127;
+ }
+ </style>
+ </defs>
+ <g id="Layer_2" data-name="Layer 2">
+ <path class="cls-3" d="M157.78,328.03c14.93,10.84,39.31-.17,41.84-1.23,17.72-7.43,29.58-23.57,49.21-50.87,13.76-19.14,15.81-25.23,20.91-24.94,15.15,.87,11.81,53.95,44.44,73.73,9.91,6.01,26.49,9.9,36.77,3.3,38.25-24.54,5.94-204.91-77.79-226.32-5-1.28-17.72-3.92-33.51,0-22.2,5.51-36.13,19.6-42.39,26.14-42.45,44.34-78.04,172.18-39.49,200.18Z"/>
+ </g>
+ <g id="Layer_7" data-name="Layer 7">
+ <path class="cls-2" d="M204.07,121.19c-1.95,2.08-2.59,2.62-4.05,4.07-3.72,3.69-6.99,6.64-7.27,7.66-2.34,8.62,150,55.54,152.87,47.02,.21-.62-.7-2.8-2.53-7.15,0,0-1.6-3.8-3.52-7.29-25.29-45.91-48.81-56.9-48.81-56.9-42.56-19.27-85.38,11.19-86.69,12.6Z"/>
+ </g>
+ <g id="Layer_9" data-name="Layer 9">
+ <path class="cls-4" d="M351.37,193.16c-5.77-11.54-85.59,16.83-154.76,27.39-21.09,3.22-38.13,4.31-47.3,4.75-.74,2.91-1.76,7.02-2.87,11.97-1.93,8.6-2.89,12.89-2.6,13.78,3.3,9.95,59.73-.88,99.18-7.64,32.67-5.6,115.14-18.96,114.61-30.77-.03-.69-1.11-4.01-3.27-10.65-1.78-5.47-2.67-8.2-2.98-8.83Z"/>
+ </g>
+ <g id="Layer_6" data-name="Layer 6">
+ <path class="cls-1" d="M253.58,138.31c-27.39-.52-46.38,38.21-37.98,54.55,10.09,19.62,65.5,18.26,74.77-3.3,7.21-16.78-11.38-50.77-36.79-51.24Z"/>
+ </g>
+ <g id="Layer_4" data-name="Layer 4">
+ <path d="M151,82.48c-6.55,27.74,252.45,113.97,267.56,89.66,9.24-14.87-64.9-83.62-163.53-97.57-39.06-5.52-100.95-5.14-104.03,7.91Z"/>
+ </g>
+ <g id="Layer_5" data-name="Layer 5">
+ <path class="cls-5" d="M221.03,89.73c.41-5.25,6.51-5.74,28.85-19.42,26.97-16.51,28.85-22.38,56.86-40.83,30.07-19.81,48.46-31.94,54.82-26.61,9.72,8.15-25.18,43.33-21.31,99.35,.87,12.61,3.12,17.79-.86,23.01-18.25,23.95-120.07-13.68-118.35-35.5Z"/>
+ <path class="cls-6" d="M791.6,449.34c3.22,0,3.22-5,0-5s-3.22,5,0,5h0Z"/>
+ </g>
+</svg>
\ No newline at end of file
--- /dev/null
+defmodule Pleroma.Activity.PrunerTest do
+ use Pleroma.DataCase, async: true
+
+ alias Pleroma.Activity
+ alias Pleroma.Activity.Pruner
+
+ import Pleroma.Factory
+
+ describe "prune_deletes" do
+ test "it prunes old delete objects" do
+ user = insert(:user)
+
+ # The :delete_activity factory already builds a "Delete"; the redundant
+ # `type: "Delete"` option was removed because the factory does not drop
+ # :type from attrs, so it leaked an unknown key into the %Activity{}.
+ new_delete = insert(:delete_activity, user: user)
+
+ # Older than the 30-day cutoff, so it must be pruned.
+ old_delete =
+ insert(:delete_activity,
+ user: user,
+ inserted_at: DateTime.utc_now() |> DateTime.add(-31 * 24, :hour)
+ )
+
+ Pruner.prune_deletes()
+ assert Activity.get_by_id(new_delete.id)
+ refute Activity.get_by_id(old_delete.id)
+ end
+ end
+
+ # NOTE(review): prune_undos/0 and prune_removes/0 have no coverage here —
+ # consider mirroring this test with data_attrs: %{"type" => "Undo"/"Remove"}.
+end
--- /dev/null
+defmodule Pleroma.Object.PrunerTest do
+ use Pleroma.DataCase, async: true
+
+ alias Pleroma.Delivery
+ alias Pleroma.Object
+ alias Pleroma.Object.Pruner
+
+ import Pleroma.Factory
+
+ # Renamed from the mislabeled `describe "prune_deletes"` / "delete objects":
+ # this block exercises Pruner.prune_tombstones/0, not delete activities.
+ describe "prune_tombstones" do
+ test "it prunes old tombstone objects" do
+ new_tombstone = insert(:tombstone)
+
+ # Older than the 30-day cutoff, so it must be pruned.
+ old_tombstone =
+ insert(:tombstone,
+ inserted_at: DateTime.utc_now() |> DateTime.add(-31 * 24, :hour)
+ )
+
+ Pruner.prune_tombstones()
+ assert Object.get_by_id(new_tombstone.id)
+ refute Object.get_by_id(old_tombstone.id)
+ end
+ end
+
+ describe "prune_tombstoned_deliveries" do
+ test "it prunes old tombstone deliveries" do
+ user = insert(:user)
+
+ tombstone = insert(:tombstone)
+ tombstoned = insert(:delivery, object: tombstone, user: user)
+
+ # Deliveries for non-tombstoned objects must survive the prune.
+ note = insert(:note)
+ not_tombstoned = insert(:delivery, object: note, user: user)
+
+ Pruner.prune_tombstoned_deliveries()
+
+ refute Repo.get(Delivery, tombstoned.id)
+ assert Repo.get(Delivery, not_tombstoned.id)
+ end
+ end
+end
alias Pleroma.Activity
alias Pleroma.Builders.ActivityBuilder
+ alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Config
alias Pleroma.Notification
alias Pleroma.Object
{:ok, user} = ActivityPub.make_user_from_ap_id("https://princess.cat/users/mewmew")
assert user.name == " "
end
+
+ # Renamed from "persist/1": these tests call ActivityPub.persist/2
+ # (object + meta keyword list). An unpersisted activity is detected via
+ # its missing inserted_at timestamp.
+ describe "persist/2" do
+ test "should not persist remote delete activities" do
+ poster = insert(:user, local: false)
+ {:ok, post} = CommonAPI.post(poster, %{status: "hhhhhh"})
+
+ {:ok, delete_data, meta} = Builder.delete(poster, post)
+ local_opts = Keyword.put(meta, :local, false)
+ {:ok, act, _meta} = ActivityPub.persist(delete_data, local_opts)
+ refute act.inserted_at
+ end
+
+ test "should not persist remote undo activities" do
+ poster = insert(:user, local: false)
+ liker = insert(:user, local: false)
+ {:ok, post} = CommonAPI.post(poster, %{status: "hhhhhh"})
+ {:ok, like} = CommonAPI.favorite(liker, post.id)
+
+ {:ok, undo_data, meta} = Builder.undo(liker, like)
+ local_opts = Keyword.put(meta, :local, false)
+ {:ok, act, _meta} = ActivityPub.persist(undo_data, local_opts)
+ refute act.inserted_at
+ end
+ end
end
test "does not allow negative integers other than -1 for TTL", %{conn: conn} do
conn = patch(conn, "/api/v1/accounts/update_credentials", %{"status_ttl_days" => "-2"})
- assert user_data = json_response_and_validate_schema(conn, 403)
+ assert json_response_and_validate_schema(conn, 403)
end
test "updates the user's AKAs", %{conn: conn} do
%Pleroma.Object{data: Map.merge(data, %{"type" => "Article"})}
end
- def tombstone_factory do
+ def tombstone_factory(attrs) do
data = %{
"type" => "Tombstone",
"id" => Pleroma.Web.ActivityPub.Utils.generate_object_id(),
%Pleroma.Object{
data: data
}
+ |> merge_attributes(attrs)
end
def question_factory(attrs \\ %{}) do
|> Map.merge(attrs)
end
+ # Builds an unpersisted Delete %Activity{} for a (possibly supplied)
+ # note_activity. Recognized factory options: :user, :note_activity,
+ # :data_attrs; everything else is merged onto the struct (e.g. :inserted_at).
+ def delete_activity_factory(attrs \\ %{}) do
+ user = attrs[:user] || insert(:user)
+ note_activity = attrs[:note_activity] || insert(:note_activity, user: user)
+
+ data_attrs = attrs[:data_attrs] || %{}
+ # Drop every factory-only key before the struct merge; :note_activity was
+ # previously not dropped, so a caller-supplied note_activity leaked into
+ # the %Activity{} struct as an unknown key.
+ attrs = Map.drop(attrs, [:user, :note_activity, :data_attrs])
+
+ data =
+ %{
+ "id" => Pleroma.Web.ActivityPub.Utils.generate_activity_id(),
+ "type" => "Delete",
+ "actor" => note_activity.data["actor"],
+ "to" => note_activity.data["to"],
+ "object" => note_activity.data["id"],
+ "published" => DateTime.utc_now() |> DateTime.to_iso8601(),
+ "context" => note_activity.data["context"]
+ }
+ |> Map.merge(data_attrs)
+
+ %Pleroma.Activity{
+ data: data,
+ actor: data["actor"],
+ recipients: data["to"]
+ }
+ |> Map.merge(attrs)
+ end
+
def oauth_app_factory do
%Pleroma.Web.OAuth.App{
client_name: sequence(:client_name, &"Some client #{&1}"),
}
|> Map.merge(params)
end
+
+ # Builds a %Delivery{} joining a user to an object; both associations
+ # default to freshly-built factory records when not supplied.
+ def delivery_factory(params \\ %{}) do
+ %Pleroma.Delivery{
+ object: Map.get(params, :object, build(:note)),
+ user: Map.get(params, :user, build(:user))
+ }
+ end
end