defmodule Mix.Tasks.Pleroma.Database do
alias Pleroma.Conversation
+ alias Pleroma.Hashtag
alias Pleroma.Maintenance
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
+
require Logger
require Pleroma.Constants
+
import Ecto.Query
import Mix.Pleroma
+
use Mix.Task
@shortdoc "A collection of database related tasks"
def run(["update_users_following_followers_counts"]) do
  start_pleroma()

  # Recount the follower/following totals for every user in the database.
  # Repo.stream/1 may only be used inside a transaction; the user table can
  # be arbitrarily large, so the transaction timeout is disabled.
  Repo.transaction(
    fn ->
      User
      |> Repo.stream()
      |> Enum.each(&User.update_follower_count/1)
    end,
    timeout: :infinity
  )
end
# NOTE(review): this hunk is truncated — the `from(...)` head of the query
# between the function head and the `where:` clause below, and the body of
# the Stream.each callback, are not visible in this excerpt. Code is left
# byte-identical; confirm against the full file before editing.
def run(["prune_objects" | args]) do
where: fragment("(?)->>'likes' is not null", object.data),
select: %{id: object.id, likes: fragment("(?)->>'likes'", object.data)}
)
# NOTE(review): chunk_stream appears to have moved from Pleroma.RepoStreamer
# to Pleroma.Repo with an explicit :batches mode — presumably yielding
# 100-element batches; verify against Pleroma.Repo.chunk_stream/3.
- |> Pleroma.RepoStreamer.chunk_stream(100)
+ |> Pleroma.Repo.chunk_stream(100, :batches)
|> Stream.each(fn objects ->
ids =
objects
|> Stream.run()
end
def run(["transfer_hashtags"]) do
  start_pleroma()

  # Backfills the hashtags/hashtags_objects tables from the embedded
  # JSON "tag" field of objects that are not yet linked to any hashtag
  # record. Processed newest-first in batches of 100.
  from(
    object in Object,
    left_join: hashtag in assoc(object, :hashtags),
    # only objects with no hashtag association yet
    where: is_nil(hashtag.id),
    # skip objects whose tag list is empty
    where: fragment("(?)->>'tag' != '[]'", object.data),
    select: %{
      id: object.id,
      inserted_at: object.inserted_at,
      tag: fragment("(?)->>'tag'", object.data)
    },
    order_by: [desc: object.id]
  )
  |> Pleroma.Repo.chunk_stream(100, :batches)
  |> Stream.each(fn objects ->
    chunk_start = List.first(objects)
    chunk_end = List.last(objects)

    # Progress log: id range of the current batch.
    Logger.info(
      "transfer_hashtags: " <>
        "#{chunk_start.id} (#{chunk_start.inserted_at}) -- " <>
        "#{chunk_end.id} (#{chunk_end.inserted_at})"
    )

    # Side effects only — Enum.each, not Enum.map.
    Enum.each(objects, fn object ->
      # "tag" is stored as a JSON array; keep only plain string tags
      # (Mention/Emoji entries are maps and are ignored here).
      hashtags =
        object.tag
        |> Jason.decode!()
        |> Enum.filter(&is_bitstring/1)

      with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do
        Repo.transaction(fn ->
          for hashtag_record <- hashtag_records do
            # Parameterized query instead of interpolating ids into the
            # SQL string — safer and lets the driver cache the statement.
            with {:error, _} <-
                   Ecto.Adapters.SQL.query(
                     Repo,
                     "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);",
                     [hashtag_record.id, object.id]
                   ) do
              Logger.warn(
                "ERROR: could not link object #{object.id} and hashtag #{hashtag_record.id}"
              )
            end
          end
        end)
      else
        e -> Logger.warn("ERROR: could not process object #{object.id}: #{inspect(e)}")
      end
    end)
  end)
  |> Stream.run()
end
+
# NOTE(review): this clause is named "vacuum" but the visible body schedules
# activity expiration (PurgeExpiredActivity) — this looks like mis-spliced
# diff context (upstream has a separate "ensure_expiration" task). The
# function also runs past the end of this excerpt (no closing `end` visible).
# Code left byte-identical; verify against the full file.
def run(["vacuum", args]) do
start_pleroma()
# Expiration window; falls back to 365 days when unset.
days = Pleroma.Config.get([:mrf_activity_expiration, :days], 365)
Pleroma.Activity
# Join each activity to its Object by matching the object's AP id against
# the activity's "object" field (which may be a string or an embedded map).
- |> join(:left, [a], u in assoc(a, :expiration))
+ |> join(:inner, [a], o in Object,
+ on:
+ fragment(
+ "(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
+ o.data,
+ a.data,
+ a.data
+ )
+ )
|> where(local: true)
# Only local Create activities for Note objects are expired.
- |> where([a, u], is_nil(u))
- |> Pleroma.RepoStreamer.chunk_stream(100)
+ |> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
+ |> where([_a, o], fragment("?->>'type' = 'Note'", o.data))
+ |> Pleroma.Repo.chunk_stream(100, :batches)
|> Stream.each(fn activities ->
Enum.each(activities, fn activity ->
- expires_at = Timex.shift(activity.inserted_at, days: days)
-
- Pleroma.ActivityExpiration.create(activity, expires_at, false)
# inserted_at is a NaiveDateTime; convert to UTC before shifting so the
# Oban job gets a proper DateTime.
+ expires_at =
+ activity.inserted_at
+ |> DateTime.from_naive!("Etc/UTC")
+ |> Timex.shift(days: days)
+
# Expiration is now handled by a worker job rather than an
# ActivityExpiration row.
+ Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
+ activity_id: activity.id,
+ expires_at: expires_at
+ })
end)
end)
|> Stream.run()