Add elasticsearch tests
author: FloatingGhost <hannah@coffee-and-dreams.uk>
Thu, 30 Jun 2022 15:53:21 +0000 (16:53 +0100)
committer: FloatingGhost <hannah@coffee-and-dreams.uk>
Thu, 30 Jun 2022 15:53:21 +0000 (16:53 +0100)
lib/pleroma/search/elasticsearch/document_mappings/activity.ex
lib/pleroma/telemetry/logger.ex
test/pleroma/search/database_search_test.exs [moved from test/pleroma/search/database_search_test.ex with 100% similarity]
test/pleroma/search/elasticsearch_test.exs [new file with mode: 0644]
test/support/elasticsearch_mock.ex [new file with mode: 0644]

index edd8e03c1817b318b1ea2b4607d1d9efb22f3821..3a84e991b023f295fc561c86b58e54736b758a31 100644 (file)
@@ -53,3 +53,9 @@ defimpl Elasticsearch.Document, for: Pleroma.Activity do
     object_to_search_data(object)
   end
 end
+
+defimpl Elasticsearch.Document, for: Pleroma.Object do
+  def id(obj), do: obj.id
+  def routing(_), do: false
+  def encode(_), do: nil
+end
index 35e245237ec5abf5004b890df5e273eddd1ac247..50f7fcf2a7af3a7c0c6cefe6831bbdacb70e6cfd 100644 (file)
@@ -12,8 +12,7 @@ defmodule Pleroma.Telemetry.Logger do
     [:pleroma, :connection_pool, :reclaim, :stop],
     [:pleroma, :connection_pool, :provision_failure],
     [:pleroma, :connection_pool, :client, :dead],
-    [:pleroma, :connection_pool, :client, :add],
-    [:pleroma, :repo, :query]
+    [:pleroma, :connection_pool, :client, :add]
   ]
   def attach do
     :telemetry.attach_many(
@@ -93,64 +92,4 @@ defmodule Pleroma.Telemetry.Logger do
   end
 
   def handle_event([:pleroma, :connection_pool, :client, :add], _, _, _), do: :ok
-
-  def handle_event(
-        [:pleroma, :repo, :query] = _name,
-        %{query_time: query_time} = measurements,
-        %{source: source} = metadata,
-        config
-      ) do
-    logging_config = Pleroma.Config.get([:telemetry, :slow_queries_logging], [])
-
-    if logging_config[:enabled] &&
-         logging_config[:min_duration] &&
-         query_time > logging_config[:min_duration] and
-         (is_nil(logging_config[:exclude_sources]) or
-            source not in logging_config[:exclude_sources]) do
-      log_slow_query(measurements, metadata, config)
-    else
-      :ok
-    end
-  end
-
-  defp log_slow_query(
-         %{query_time: query_time} = _measurements,
-         %{source: _source, query: query, params: query_params, repo: repo} = _metadata,
-         _config
-       ) do
-    sql_explain =
-      with {:ok, %{rows: explain_result_rows}} <-
-             repo.query("EXPLAIN " <> query, query_params, log: false) do
-        Enum.map_join(explain_result_rows, "\n", & &1)
-      end
-
-    {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
-
-    pleroma_stacktrace =
-      Enum.filter(stacktrace, fn
-        {__MODULE__, _, _, _} ->
-          false
-
-        {mod, _, _, _} ->
-          mod
-          |> to_string()
-          |> String.starts_with?("Elixir.Pleroma.")
-      end)
-
-    Logger.warn(fn ->
-      """
-      Slow query!
-
-      Total time: #{round(query_time / 1_000)} ms
-
-      #{query}
-
-      #{inspect(query_params, limit: :infinity)}
-
-      #{sql_explain}
-
-      #{Exception.format_stacktrace(pleroma_stacktrace)}
-      """
-    end)
-  end
 end
diff --git a/test/pleroma/search/elasticsearch_test.exs b/test/pleroma/search/elasticsearch_test.exs
new file mode 100644 (file)
index 0000000..cc5eb67
--- /dev/null
@@ -0,0 +1,120 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Search.ElasticsearchTest do
+  require Pleroma.Constants
+
+  use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo
+
+  import Pleroma.Factory
+  import Tesla.Mock
+  import Mock
+
+  alias Pleroma.Web.CommonAPI
+  alias Pleroma.Workers.SearchIndexingWorker
+
+  describe "elasticsearch" do
+    setup do
+      clear_config([Pleroma.Search, :module], Pleroma.Search.Elasticsearch)
+      clear_config([Pleroma.Search.Elasticsearch.Cluster, :api], Pleroma.ElasticsearchMock)
+    end
+
+    setup_with_mocks(
+      [
+        {Pleroma.Search.Elasticsearch, [:passthrough],
+         [
+           add_to_index: fn a -> passthrough([a]) end,
+           remove_from_index: fn a -> passthrough([a]) end
+         ]},
+        {Elasticsearch, [:passthrough],
+         [
+           put_document: fn _, _, _ -> :ok end,
+           delete_document: fn _, _, _ -> :ok end
+         ]}
+      ],
+      context,
+      do: {:ok, context}
+    )
+
+    test "indexes a local post on creation" do
+      user = insert(:user)
+
+      {:ok, activity} =
+        CommonAPI.post(user, %{
+          status: "guys i just don't wanna leave the swamp",
+          visibility: "public"
+        })
+
+      args = %{"op" => "add_to_index", "activity" => activity.id}
+
+      assert_enqueued(
+        worker: SearchIndexingWorker,
+        args: args
+      )
+
+      assert :ok = perform_job(SearchIndexingWorker, args)
+
+      assert_called(Pleroma.Search.Elasticsearch.add_to_index(activity))
+    end
+
+    test "doesn't index posts that are not public" do
+      user = insert(:user)
+
+      Enum.each(["private", "direct"], fn visibility ->
+        {:ok, activity} =
+          CommonAPI.post(user, %{
+            status: "guys i just don't wanna leave the swamp",
+            visibility: visibility
+          })
+
+        args = %{"op" => "add_to_index", "activity" => activity.id}
+
+        assert_enqueued(worker: SearchIndexingWorker, args: args)
+        assert :ok = perform_job(SearchIndexingWorker, args)
+
+        assert_not_called(Elasticsearch.put_document(:_))
+      end)
+
+      history = call_history(Pleroma.Search.Elasticsearch)
+      assert Enum.count(history) == 2
+    end
+
+    test "deletes posts from index when deleted locally" do
+      user = insert(:user)
+
+      mock_global(fn
+        %{method: :put, url: "http://127.0.0.1:7700/indexes/objects/documents", body: body} ->
+          assert match?(
+                   [%{"content" => "guys i just don&#39;t wanna leave the swamp"}],
+                   Jason.decode!(body)
+                 )
+
+          json(%{updateId: 1})
+
+        %{method: :delete, url: "http://127.0.0.1:7700/indexes/objects/documents/" <> id} ->
+          assert String.length(id) > 1
+          json(%{updateId: 2})
+      end)
+
+      {:ok, activity} =
+        CommonAPI.post(user, %{
+          status: "guys i just don't wanna leave the swamp",
+          visibility: "public"
+        })
+
+      args = %{"op" => "add_to_index", "activity" => activity.id}
+      assert_enqueued(worker: SearchIndexingWorker, args: args)
+      assert :ok = perform_job(SearchIndexingWorker, args)
+
+      {:ok, _} = CommonAPI.delete(activity.id, user)
+
+      delete_args = %{"op" => "remove_from_index", "object" => activity.object.id}
+      assert_enqueued(worker: SearchIndexingWorker, args: delete_args)
+      assert :ok = perform_job(SearchIndexingWorker, delete_args)
+
+      assert_called(Pleroma.Search.Elasticsearch.remove_from_index(:_))
+    end
+  end
+end
diff --git a/test/support/elasticsearch_mock.ex b/test/support/elasticsearch_mock.ex
new file mode 100644 (file)
index 0000000..6e203f2
--- /dev/null
@@ -0,0 +1,14 @@
+defmodule Pleroma.ElasticsearchMock do
+  @behaviour Elasticsearch.API
+
+  @impl true
+  def request(_config, :get, "/posts/1", _data, _opts) do
+    {:ok,
+     %HTTPoison.Response{
+       status_code: 404,
+       body: %{
+         "status" => "not_found"
+       }
+     }}
+  end
+end