fix search media proxy urls
author     Maksim Pechnikov <parallel588@gmail.com>
           Sun, 16 Aug 2020 03:54:48 +0000 (06:54 +0300)
committer  Maksim Pechnikov <parallel588@gmail.com>
           Sun, 16 Aug 2020 03:54:48 +0000 (06:54 +0300)
lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
test/web/admin_api/controllers/media_proxy_cache_controller_test.exs

diff --git a/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
index 76d3af4efccac79edcd8a1fc32faf46bc6ae365a..131e22d78a03ac5a65fa1c3ab2f99a2dbaf17957 100644
--- a/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
@@ -38,18 +38,20 @@ defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do
 
   defp fetch_entries(params) do
     MediaProxy.cache_table()
-    |> Cachex.export!()
-    |> filter_urls(params[:query])
+    |> Cachex.stream!(Cachex.Query.create(true, :key))
+    |> filter_entries(params[:query])
   end
 
-  defp filter_urls(entries, query) when is_binary(query) do
-    for {_, url, _, _, _} <- entries, String.contains?(url, query), do: url
-  end
+  defp filter_entries(stream, query) when is_binary(query) do
+    regex = ~r/#{query}/i
 
-  defp filter_urls(entries, _) do
-    Enum.map(entries, fn {_, url, _, _, _} -> url end)
+    stream
+    |> Enum.filter(fn url -> String.match?(url, regex) end)
+    |> Enum.to_list()
   end
 
+  defp filter_entries(stream, _), do: Enum.to_list(stream)
+
   defp paginate_entries(entries, page, page_size) do
     offset = page_size * (page - 1)
     Enum.slice(entries, offset, page_size)
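
The updated fetch_entries/1 streams only the cache keys (the proxied URLs) via Cachex.Query.create(true, :key) instead of exporting full entry tuples, and filter_entries/2 matches those keys against a case-insensitive regex built from the query. A minimal sketch of that pipeline, assuming an illustrative cache name (:demo_cache) and made-up URLs that are not part of the patch:

    # Sketch of the key-streaming + case-insensitive filter introduced above.
    # :demo_cache and the URLs below are illustrative only.
    {:ok, _pid} = Cachex.start_link(:demo_cache)

    Cachex.put(:demo_cache, "https://media.example.net/proxy/F44abc.png", true)
    Cachex.put(:demo_cache, "https://media.example.net/proxy/zz9.png", true)

    query = "f44"
    regex = ~r/#{query}/i

    :demo_cache
    # stream only the keys (the cached URLs), not the full entry tuples
    |> Cachex.stream!(Cachex.Query.create(true, :key))
    # keep URLs matching the query regardless of case
    |> Enum.filter(fn url -> String.match?(url, regex) end)
    # => ["https://media.example.net/proxy/F44abc.png"]
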
diff --git a/test/web/admin_api/controllers/media_proxy_cache_controller_test.exs b/test/web/admin_api/controllers/media_proxy_cache_controller_test.exs
index 3cf98d7c7860843af60d04942fe7057431a2486b..f243d1fb29787508176b4cc6e687750456242a01 100644
--- a/test/web/admin_api/controllers/media_proxy_cache_controller_test.exs
+++ b/test/web/admin_api/controllers/media_proxy_cache_controller_test.exs
@@ -89,7 +89,7 @@ defmodule Pleroma.Web.AdminAPI.MediaProxyCacheControllerTest do
 
       response =
         conn
-        |> get("/api/pleroma/admin/media_proxy_caches?page_size=2&query=f44")
+        |> get("/api/pleroma/admin/media_proxy_caches?page_size=2&query=F44")
         |> json_response_and_validate_schema(200)
 
       assert response["urls"] == [