Merge remote-tracking branch 'remotes/origin/develop' into feature/object-hashtags...
diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex
index 048f3c8ee49548db2309fd0adedff62f07b5d194..07b42a7f4d946ede09fc0813c884c34846009ece 100644
--- a/lib/pleroma/migrators/hashtags_table_migrator.ex
+++ b/lib/pleroma/migrators/hashtags_table_migrator.ex
@@ -72,6 +72,8 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
 
   @impl true
   def handle_info(:migrate_hashtags, state) do
+    State.clear()
+
     data_migration = data_migration()
 
     persistent_data = Map.take(data_migration.data, ["max_processed_id"])
@@ -80,6 +82,7 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
       DataMigration.update(data_migration, %{state: :running, data: persistent_data})
 
     update_status(:running)
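+    # Remember when processing started; used below to compute :records_per_second.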
+    put_stat(:started_at, NaiveDateTime.utc_now())
 
     Logger.info("Starting transferring object embedded hashtags to `hashtags` table...")
 
@@ -108,8 +111,9 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
 
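+      # Drop stale failure records for ids that succeeded in this chunk,
+      # scoped to this data migration.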
       _ =
         Repo.query(
-          "DELETE FROM data_migration_failed_ids WHERE id = ANY($1)",
-          [object_ids -- failed_ids]
+          "DELETE FROM data_migration_failed_ids " <>
+            "WHERE data_migration_id = $1 AND record_id = ANY($2)",
+          [data_migration.id, object_ids -- failed_ids]
         )
 
       max_object_id = Enum.at(object_ids, -1)
@@ -118,6 +122,12 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
       increment_stat(:processed_count, length(object_ids))
       increment_stat(:failed_count, length(failed_ids))
 
+      put_stat(
+        :records_per_second,
+        state()[:processed_count] /
+          Enum.max([NaiveDateTime.diff(NaiveDateTime.utc_now(), state()[:started_at]), 1])
+      )
+
       persist_stats(data_migration)
 
       # A quick and dirty approach to controlling the load this background migration imposes
@@ -126,12 +136,8 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
     end)
     |> Stream.run()
 
-    with {:ok, %{rows: [[0]]}} <-
-           Repo.query(
-             "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;",
-             [data_migration.id]
-           ) do
-      _ = DataMigration.update_state(data_migration, :complete)
+    with 0 <- failures_count(data_migration.id) do
+      {:ok, data_migration} = DataMigration.update_state(data_migration, :complete)
 
       handle_success(data_migration)
     else
@@ -146,10 +152,9 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
 
   defp query do
     # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out)
+    # Note: not checking activity type; HashtagsCleanupWorker should clean up unused records later
     from(
       object in Object,
-      left_join: hashtag in assoc(object, :hashtags),
-      where: is_nil(hashtag.id),
       where:
         fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data),
       select: %{
@@ -157,11 +162,24 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
         tag: fragment("(?)->'tag'", object.data)
       }
     )
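+    # Select only objects that have no rows in hashtags_objects yet,
+    # i.e. objects whose embedded hashtags have not been transferred.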
+    |> join(:left, [o], hashtags_objects in fragment("SELECT object_id FROM hashtags_objects"),
+      on: hashtags_objects.object_id == o.id
+    )
+    |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id))
   end
 
   defp transfer_object_hashtags(object) do
-    hashtags = Object.object_data_hashtags(%{"tag" => object.tag})
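+    # Accept both the partial maps selected by query/0 (:tag) and full Object
+    # structs (data["tag"]); skip the transaction when there are no hashtags.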
+    embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"]
+    hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags})
+
+    if Enum.any?(hashtags) do
+      transfer_object_hashtags(object, hashtags)
+    else
+      {:ok, object.id}
+    end
+  end
 
+  defp transfer_object_hashtags(object, hashtags) do
     Repo.transaction(fn ->
       with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do
         for hashtag_record <- hashtag_records do
@@ -192,13 +210,18 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
     end)
   end
 
-  def count(force \\ false) do
+  @doc "Approximate count for current iteration (including processed records count)"
+  def count(force \\ false, timeout \\ :infinity) do
     stored_count = state()[:count]
 
     if stored_count && !force do
       stored_count
     else
-      count = Repo.aggregate(query(), :count, :id)
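+      # Count only records past the persisted checkpoint, then add the already-processed count.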
+      processed_count = state()[:processed_count] || 0
+      max_processed_id = data_migration().data["max_processed_id"] || 0
+      query = where(query(), [object], object.id > ^max_processed_id)
+
+      count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count
       put_stat(:count, count)
       count
     end
@@ -234,6 +257,36 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
     |> order_by([o], asc: o.id)
   end
 
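+  @doc "Returns the number of failed records for the given (or current) data migration"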
+  def failures_count(data_migration_id \\ nil) do
+    data_migration_id = data_migration_id || data_migration().id
+
+    with {:ok, %{rows: [[count]]}} <-
+           Repo.query(
+             "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;",
+             [data_migration_id]
+           ) do
+      count
+    end
+  end
+
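+  @doc "Retries transfer for failed records, removing them from data_migration_failed_ids on success"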
+  def retry_failed do
+    data_migration = data_migration()
+
+    failed_objects_query()
+    |> Repo.chunk_stream(100, :one)
+    |> Stream.each(fn object ->
+      with {:ok, _} <- transfer_object_hashtags(object) do
+        _ =
+          Repo.query(
+            "DELETE FROM data_migration_failed_ids " <>
+              "WHERE data_migration_id = $1 AND record_id = $2",
+            [data_migration.id, object.id]
+          )
+      end
+    end)
+    |> Stream.run()
+  end
+
   def force_continue do
     send(whereis(), :migrate_hashtags)
   end
@@ -243,6 +296,12 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
     force_continue()
   end
 
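+  @doc "Marks the data migration as complete without checking for remaining failures"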
+  def force_complete do
+    {:ok, data_migration} = DataMigration.update_state(data_migration(), :complete)
+
+    handle_success(data_migration)
+  end
+
   defp update_status(status, message \\ nil) do
     put_stat(:status, status)
     put_stat(:message, message)