Commit f6da12f4 authored by Maksim's avatar Maksim
Browse files

Fix search over MediaProxy cache URLs

parent 56470647
Pipeline #29838 passed with stages
in 16 minutes and 58 seconds
...@@ -38,18 +38,20 @@ def index(%{assigns: %{user: _}} = conn, params) do ...@@ -38,18 +38,20 @@ def index(%{assigns: %{user: _}} = conn, params) do
# Streams every cached URL key out of the media-proxy cache table and
# narrows the result by the optional `:query` param via `filter_entries/2`.
defp fetch_entries(params) do
  query = Cachex.Query.create(true, :key)

  MediaProxy.cache_table()
  |> Cachex.stream!(query)
  |> filter_entries(params[:query])
end
defp filter_urls(entries, query) when is_binary(query) do defp filter_entries(stream, query) when is_binary(query) do
for {_, url, _, _, _} <- entries, String.contains?(url, query), do: url regex = ~r/#{query}/i
end
defp filter_urls(entries, _) do stream
Enum.map(entries, fn {_, url, _, _, _} -> url end) |> Enum.filter(fn url -> String.match?(url, regex) end)
|> Enum.to_list()
end end
defp filter_entries(stream, _), do: Enum.to_list(stream)
# Returns the slice of `entries` that belongs on 1-indexed `page`,
# with at most `page_size` items per page.
defp paginate_entries(entries, page, page_size) do
  start_index = (page - 1) * page_size
  Enum.slice(entries, start_index, page_size)
end
......
...@@ -89,7 +89,7 @@ test "search banned MediaProxy URLs", %{conn: conn} do ...@@ -89,7 +89,7 @@ test "search banned MediaProxy URLs", %{conn: conn} do
response = response =
conn conn
|> get("/api/pleroma/admin/media_proxy_caches?page_size=2&query=f44") |> get("/api/pleroma/admin/media_proxy_caches?page_size=2&query=F44")
|> json_response_and_validate_schema(200) |> json_response_and_validate_schema(200)
assert response["urls"] == [ assert response["urls"] == [
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment